1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed. 11 12 Collective on Vec 13 14 Input Parameters: 15 + dm - the DM 16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm' 17 - optionname - option to activate viewing 18 19 Level: intermediate 20 21 .keywords: DM, view, options, database 22 .seealso: VecViewFromOptions(), MatViewFromOptions() 23 */ 24 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 25 { 26 PetscErrorCode ierr; 27 PetscBool flg; 28 PetscViewer viewer; 29 PetscViewerFormat format; 30 31 PetscFunctionBegin; 32 if (prefix) { 33 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 34 } else { 35 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 36 } 37 if (flg) { 38 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 39 ierr = DMView(dm,viewer);CHKERRQ(ierr); 40 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 41 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 42 } 43 PetscFunctionReturn(0); 44 } 45 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMCreate" 49 /*@ 50 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 51 52 If you never call DMSetType() it will generate an 53 error when you try to use the vector. 54 55 Collective on MPI_Comm 56 57 Input Parameter: 58 . comm - The communicator for the DM object 59 60 Output Parameter: 61 . 
dm - The DM object 62 63 Level: beginner 64 65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 66 @*/ 67 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 68 { 69 DM v; 70 PetscErrorCode ierr; 71 72 PetscFunctionBegin; 73 PetscValidPointer(dm,2); 74 *dm = NULL; 75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 76 ierr = VecInitializePackage();CHKERRQ(ierr); 77 ierr = MatInitializePackage();CHKERRQ(ierr); 78 ierr = DMInitializePackage();CHKERRQ(ierr); 79 #endif 80 81 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 82 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 83 84 85 v->ltogmap = NULL; 86 v->ltogmapb = NULL; 87 v->bs = 1; 88 v->coloringtype = IS_COLORING_GLOBAL; 89 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 90 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 91 v->defaultSection = NULL; 92 v->defaultGlobalSection = NULL; 93 { 94 PetscInt i; 95 for (i = 0; i < 10; ++i) { 96 v->nullspaceConstructors[i] = NULL; 97 } 98 } 99 v->numFields = 0; 100 v->fields = NULL; 101 102 *dm = v; 103 PetscFunctionReturn(0); 104 } 105 106 #undef __FUNCT__ 107 #define __FUNCT__ "DMSetVecType" 108 /*@C 109 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 110 111 Logically Collective on DMDA 112 113 Input Parameter: 114 + da - initial distributed array 115 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 116 117 Options Database: 118 . 
-dm_vec_type ctype 119 120 Level: intermediate 121 122 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 123 @*/ 124 PetscErrorCode DMSetVecType(DM da,VecType ctype) 125 { 126 PetscErrorCode ierr; 127 128 PetscFunctionBegin; 129 PetscValidHeaderSpecific(da,DM_CLASSID,1); 130 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 131 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 132 PetscFunctionReturn(0); 133 } 134 135 #undef __FUNCT__ 136 #define __FUNCT__ "VecGetDM" 137 /*@ 138 VecGetDM - Gets the DM defining the data layout of the vector 139 140 Not collective 141 142 Input Parameter: 143 . v - The Vec 144 145 Output Parameter: 146 . dm - The DM 147 148 Level: intermediate 149 150 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 151 @*/ 152 PetscErrorCode VecGetDM(Vec v, DM *dm) 153 { 154 PetscErrorCode ierr; 155 156 PetscFunctionBegin; 157 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 158 PetscValidPointer(dm,2); 159 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 160 PetscFunctionReturn(0); 161 } 162 163 #undef __FUNCT__ 164 #define __FUNCT__ "VecSetDM" 165 /*@ 166 VecSetDM - Sets the DM defining the data layout of the vector 167 168 Not collective 169 170 Input Parameters: 171 + v - The Vec 172 - dm - The DM 173 174 Level: intermediate 175 176 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 177 @*/ 178 PetscErrorCode VecSetDM(Vec v, DM dm) 179 { 180 PetscErrorCode ierr; 181 182 PetscFunctionBegin; 183 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 184 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 185 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 186 PetscFunctionReturn(0); 187 } 188 189 #undef __FUNCT__ 190 #define __FUNCT__ "DMSetMatType" 191 /*@C 192 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 193 194 Logically Collective on 
DM 195 196 Input Parameter: 197 + dm - the DM context 198 . ctype - the matrix type 199 200 Options Database: 201 . -dm_mat_type ctype 202 203 Level: intermediate 204 205 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 206 @*/ 207 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 208 { 209 PetscErrorCode ierr; 210 211 PetscFunctionBegin; 212 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 213 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 214 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 215 PetscFunctionReturn(0); 216 } 217 218 #undef __FUNCT__ 219 #define __FUNCT__ "MatGetDM" 220 /*@ 221 MatGetDM - Gets the DM defining the data layout of the matrix 222 223 Not collective 224 225 Input Parameter: 226 . A - The Mat 227 228 Output Parameter: 229 . dm - The DM 230 231 Level: intermediate 232 233 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 234 @*/ 235 PetscErrorCode MatGetDM(Mat A, DM *dm) 236 { 237 PetscErrorCode ierr; 238 239 PetscFunctionBegin; 240 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 241 PetscValidPointer(dm,2); 242 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 243 PetscFunctionReturn(0); 244 } 245 246 #undef __FUNCT__ 247 #define __FUNCT__ "MatSetDM" 248 /*@ 249 MatSetDM - Sets the DM defining the data layout of the matrix 250 251 Not collective 252 253 Input Parameters: 254 + A - The Mat 255 - dm - The DM 256 257 Level: intermediate 258 259 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 260 @*/ 261 PetscErrorCode MatSetDM(Mat A, DM dm) 262 { 263 PetscErrorCode ierr; 264 265 PetscFunctionBegin; 266 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 267 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 268 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 269 PetscFunctionReturn(0); 270 } 271 272 #undef __FUNCT__ 273 #define __FUNCT__ "DMSetOptionsPrefix" 274 /*@C 275 DMSetOptionsPrefix - 
Sets the prefix used for searching for all 276 DMDA options in the database. 277 278 Logically Collective on DMDA 279 280 Input Parameter: 281 + da - the DMDA context 282 - prefix - the prefix to prepend to all option names 283 284 Notes: 285 A hyphen (-) must NOT be given at the beginning of the prefix name. 286 The first character of all runtime options is AUTOMATICALLY the hyphen. 287 288 Level: advanced 289 290 .keywords: DMDA, set, options, prefix, database 291 292 .seealso: DMSetFromOptions() 293 @*/ 294 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 295 { 296 PetscErrorCode ierr; 297 298 PetscFunctionBegin; 299 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 300 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 301 PetscFunctionReturn(0); 302 } 303 304 #undef __FUNCT__ 305 #define __FUNCT__ "DMDestroy" 306 /*@ 307 DMDestroy - Destroys a vector packer or DMDA. 308 309 Collective on DM 310 311 Input Parameter: 312 . dm - the DM object to destroy 313 314 Level: developer 315 316 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 317 318 @*/ 319 PetscErrorCode DMDestroy(DM *dm) 320 { 321 PetscInt i, cnt = 0, f; 322 DMNamedVecLink nlink,nnext; 323 PetscErrorCode ierr; 324 325 PetscFunctionBegin; 326 if (!*dm) PetscFunctionReturn(0); 327 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 328 329 /* count all the circular references of DM and its contained Vecs */ 330 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 331 if ((*dm)->localin[i]) cnt++; 332 if ((*dm)->globalin[i]) cnt++; 333 } 334 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 335 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 336 if ((*dm)->x) { 337 DM obj; 338 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 339 if (obj == *dm) cnt++; 340 } 341 342 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 343 /* 344 Need this test because the dm references the vectors that 345 
reference the dm, so destroying the dm calls destroy on the 346 vectors that cause another destroy on the dm 347 */ 348 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 349 ((PetscObject) (*dm))->refct = 0; 350 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 351 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 352 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 353 } 354 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 355 nnext = nlink->next; 356 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 357 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 358 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 359 ierr = PetscFree(nlink);CHKERRQ(ierr); 360 } 361 (*dm)->namedglobal = NULL; 362 363 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 364 nnext = nlink->next; 365 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 366 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 367 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 368 ierr = PetscFree(nlink);CHKERRQ(ierr); 369 } 370 (*dm)->namedlocal = NULL; 371 372 /* Destroy the list of hooks */ 373 { 374 DMCoarsenHookLink link,next; 375 for (link=(*dm)->coarsenhook; link; link=next) { 376 next = link->next; 377 ierr = PetscFree(link);CHKERRQ(ierr); 378 } 379 (*dm)->coarsenhook = NULL; 380 } 381 { 382 DMRefineHookLink link,next; 383 for (link=(*dm)->refinehook; link; link=next) { 384 next = link->next; 385 ierr = PetscFree(link);CHKERRQ(ierr); 386 } 387 (*dm)->refinehook = NULL; 388 } 389 { 390 DMSubDomainHookLink link,next; 391 for (link=(*dm)->subdomainhook; link; link=next) { 392 next = link->next; 393 ierr = PetscFree(link);CHKERRQ(ierr); 394 } 395 
(*dm)->subdomainhook = NULL; 396 } 397 { 398 DMGlobalToLocalHookLink link,next; 399 for (link=(*dm)->gtolhook; link; link=next) { 400 next = link->next; 401 ierr = PetscFree(link);CHKERRQ(ierr); 402 } 403 (*dm)->gtolhook = NULL; 404 } 405 /* Destroy the work arrays */ 406 { 407 DMWorkLink link,next; 408 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 409 for (link=(*dm)->workin; link; link=next) { 410 next = link->next; 411 ierr = PetscFree(link->mem);CHKERRQ(ierr); 412 ierr = PetscFree(link);CHKERRQ(ierr); 413 } 414 (*dm)->workin = NULL; 415 } 416 417 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 418 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 419 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 420 421 if ((*dm)->ctx && (*dm)->ctxdestroy) { 422 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 423 } 424 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 425 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 426 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 427 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 428 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 429 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 430 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 431 432 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 433 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 434 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 435 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 436 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 437 438 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 439 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 440 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 441 442 for (f = 0; f < (*dm)->numFields; ++f) { 443 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 444 } 445 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 446 /* if 
memory was published with AMS then destroy it */ 447 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 448 449 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 450 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 451 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 452 PetscFunctionReturn(0); 453 } 454 455 #undef __FUNCT__ 456 #define __FUNCT__ "DMSetUp" 457 /*@ 458 DMSetUp - sets up the data structures inside a DM object 459 460 Collective on DM 461 462 Input Parameter: 463 . dm - the DM object to setup 464 465 Level: developer 466 467 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 468 469 @*/ 470 PetscErrorCode DMSetUp(DM dm) 471 { 472 PetscErrorCode ierr; 473 474 PetscFunctionBegin; 475 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 476 if (dm->setupcalled) PetscFunctionReturn(0); 477 if (dm->ops->setup) { 478 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 479 } 480 dm->setupcalled = PETSC_TRUE; 481 PetscFunctionReturn(0); 482 } 483 484 #undef __FUNCT__ 485 #define __FUNCT__ "DMSetFromOptions" 486 /*@ 487 DMSetFromOptions - sets parameters in a DM from the options database 488 489 Collective on DM 490 491 Input Parameter: 492 . dm - the DM object to set options for 493 494 Options Database: 495 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 496 . -dm_vec_type <type> type of vector to create inside DM 497 . 
-dm_mat_type <type> type of matrix to create inside DM 498 - -dm_coloring_type <global or ghosted> 499 500 Level: developer 501 502 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 503 504 @*/ 505 PetscErrorCode DMSetFromOptions(DM dm) 506 { 507 char typeName[256] = MATAIJ; 508 PetscBool flg; 509 PetscErrorCode ierr; 510 511 PetscFunctionBegin; 512 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 513 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 514 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 515 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 516 if (flg) { 517 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 518 } 519 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 520 if (flg) { 521 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 522 } 523 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 524 if (dm->ops->setfromoptions) { 525 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 526 } 527 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 528 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 529 ierr = PetscOptionsEnd();CHKERRQ(ierr); 530 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 531 PetscFunctionReturn(0); 532 } 533 534 #undef __FUNCT__ 535 #define __FUNCT__ "DMView" 536 /*@C 537 DMView - Views a vector packer or DMDA. 
538 539 Collective on DM 540 541 Input Parameter: 542 + dm - the DM object to view 543 - v - the viewer 544 545 Level: developer 546 547 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 548 549 @*/ 550 PetscErrorCode DMView(DM dm,PetscViewer v) 551 { 552 PetscErrorCode ierr; 553 PetscBool isbinary; 554 555 PetscFunctionBegin; 556 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 557 if (!v) { 558 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 559 } 560 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 561 if (isbinary) { 562 PetscInt classid = DM_FILE_CLASSID; 563 char type[256]; 564 565 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 566 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 567 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 568 } 569 if (dm->ops->view) { 570 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 571 } 572 PetscFunctionReturn(0); 573 } 574 575 #undef __FUNCT__ 576 #define __FUNCT__ "DMCreateGlobalVector" 577 /*@ 578 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 579 580 Collective on DM 581 582 Input Parameter: 583 . dm - the DM object 584 585 Output Parameter: 586 . vec - the global vector 587 588 Level: beginner 589 590 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 591 592 @*/ 593 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 594 { 595 PetscErrorCode ierr; 596 597 PetscFunctionBegin; 598 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 599 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 600 PetscFunctionReturn(0); 601 } 602 603 #undef __FUNCT__ 604 #define __FUNCT__ "DMCreateLocalVector" 605 /*@ 606 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 607 608 Not Collective 609 610 Input Parameter: 611 . 
dm - the DM object 612 613 Output Parameter: 614 . vec - the local vector 615 616 Level: beginner 617 618 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 619 620 @*/ 621 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 622 { 623 PetscErrorCode ierr; 624 625 PetscFunctionBegin; 626 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 627 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 628 PetscFunctionReturn(0); 629 } 630 631 #undef __FUNCT__ 632 #define __FUNCT__ "DMGetLocalToGlobalMapping" 633 /*@ 634 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 635 636 Collective on DM 637 638 Input Parameter: 639 . dm - the DM that provides the mapping 640 641 Output Parameter: 642 . ltog - the mapping 643 644 Level: intermediate 645 646 Notes: 647 This mapping can then be used by VecSetLocalToGlobalMapping() or 648 MatSetLocalToGlobalMapping(). 649 650 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 651 @*/ 652 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 653 { 654 PetscErrorCode ierr; 655 656 PetscFunctionBegin; 657 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 658 PetscValidPointer(ltog,2); 659 if (!dm->ltogmap) { 660 PetscSection section, sectionGlobal; 661 662 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 663 if (section) { 664 PetscInt *ltog; 665 PetscInt pStart, pEnd, size, p, l; 666 667 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 668 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 669 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 670 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 671 for (p = pStart, l = 0; p < pEnd; ++p) { 672 PetscInt dof, off, c; 673 674 /* Should probably use constrained dofs */ 675 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 676 ierr = PetscSectionGetOffset(sectionGlobal, p, 
&off);CHKERRQ(ierr); 677 for (c = 0; c < dof; ++c, ++l) { 678 ltog[l] = off+c; 679 } 680 } 681 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 682 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 683 } else { 684 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 685 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 686 } 687 } 688 *ltog = dm->ltogmap; 689 PetscFunctionReturn(0); 690 } 691 692 #undef __FUNCT__ 693 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 694 /*@ 695 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 696 697 Collective on DM 698 699 Input Parameter: 700 . da - the distributed array that provides the mapping 701 702 Output Parameter: 703 . ltog - the block mapping 704 705 Level: intermediate 706 707 Notes: 708 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 709 MatSetLocalToGlobalMappingBlock(). 
710 711 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 712 @*/ 713 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 714 { 715 PetscErrorCode ierr; 716 717 PetscFunctionBegin; 718 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 719 PetscValidPointer(ltog,2); 720 if (!dm->ltogmapb) { 721 PetscInt bs; 722 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 723 if (bs > 1) { 724 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 725 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 726 } else { 727 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 728 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 729 } 730 } 731 *ltog = dm->ltogmapb; 732 PetscFunctionReturn(0); 733 } 734 735 #undef __FUNCT__ 736 #define __FUNCT__ "DMGetBlockSize" 737 /*@ 738 DMGetBlockSize - Gets the inherent block size associated with a DM 739 740 Not Collective 741 742 Input Parameter: 743 . dm - the DM with block structure 744 745 Output Parameter: 746 . 
bs - the block size, 1 implies no exploitable block structure 747 748 Level: intermediate 749 750 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 751 @*/ 752 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 753 { 754 PetscFunctionBegin; 755 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 756 PetscValidPointer(bs,2); 757 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 758 *bs = dm->bs; 759 PetscFunctionReturn(0); 760 } 761 762 #undef __FUNCT__ 763 #define __FUNCT__ "DMCreateInterpolation" 764 /*@ 765 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 766 767 Collective on DM 768 769 Input Parameter: 770 + dm1 - the DM object 771 - dm2 - the second, finer DM object 772 773 Output Parameter: 774 + mat - the interpolation 775 - vec - the scaling (optional) 776 777 Level: developer 778 779 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 780 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 781 782 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 783 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
784 785 786 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 787 788 @*/ 789 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 790 { 791 PetscErrorCode ierr; 792 793 PetscFunctionBegin; 794 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 795 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 796 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 797 PetscFunctionReturn(0); 798 } 799 800 #undef __FUNCT__ 801 #define __FUNCT__ "DMCreateInjection" 802 /*@ 803 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 804 805 Collective on DM 806 807 Input Parameter: 808 + dm1 - the DM object 809 - dm2 - the second, finer DM object 810 811 Output Parameter: 812 . ctx - the injection 813 814 Level: developer 815 816 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 817 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 818 819 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 820 821 @*/ 822 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 823 { 824 PetscErrorCode ierr; 825 826 PetscFunctionBegin; 827 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 828 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 829 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 830 PetscFunctionReturn(0); 831 } 832 833 #undef __FUNCT__ 834 #define __FUNCT__ "DMCreateColoring" 835 /*@C 836 DMCreateColoring - Gets coloring for a DMDA or DMComposite 837 838 Collective on DM 839 840 Input Parameter: 841 + dm - the DM object 842 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 843 - matype - either MATAIJ or MATBAIJ 844 845 Output Parameter: 846 . 
coloring - the coloring 847 848 Level: developer 849 850 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 851 852 @*/ 853 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 854 { 855 PetscErrorCode ierr; 856 857 PetscFunctionBegin; 858 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 859 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 860 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 861 PetscFunctionReturn(0); 862 } 863 864 #undef __FUNCT__ 865 #define __FUNCT__ "DMCreateMatrix" 866 /*@C 867 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 868 869 Collective on DM 870 871 Input Parameter: 872 + dm - the DM object 873 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 874 any type which inherits from one of these (such as MATAIJ) 875 876 Output Parameter: 877 . mat - the empty Jacobian 878 879 Level: beginner 880 881 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 882 do not need to do it yourself. 883 884 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 885 the nonzero pattern call DMDASetMatPreallocateOnly() 886 887 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 888 internally by PETSc. 889 890 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 891 the indices for the global numbering for DMDAs which is complicated. 
892 893 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 894 895 @*/ 896 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 897 { 898 PetscErrorCode ierr; 899 900 PetscFunctionBegin; 901 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 902 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 903 ierr = MatInitializePackage();CHKERRQ(ierr); 904 #endif 905 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 906 PetscValidPointer(mat,3); 907 if (dm->mattype) { 908 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 909 } else { 910 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 911 } 912 PetscFunctionReturn(0); 913 } 914 915 #undef __FUNCT__ 916 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 917 /*@ 918 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 919 preallocated but the nonzero structure and zero values will not be set. 920 921 Logically Collective on DMDA 922 923 Input Parameter: 924 + dm - the DM 925 - only - PETSC_TRUE if only want preallocation 926 927 Level: developer 928 .seealso DMCreateMatrix() 929 @*/ 930 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 931 { 932 PetscFunctionBegin; 933 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 934 dm->prealloc_only = only; 935 PetscFunctionReturn(0); 936 } 937 938 #undef __FUNCT__ 939 #define __FUNCT__ "DMGetWorkArray" 940 /*@C 941 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 942 943 Not Collective 944 945 Input Parameters: 946 + dm - the DM object 947 . count - The minium size 948 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 949 950 Output Parameter: 951 . 
array - the work array 952 953 Level: developer 954 955 .seealso DMDestroy(), DMCreate() 956 @*/ 957 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 958 { 959 PetscErrorCode ierr; 960 DMWorkLink link; 961 size_t size; 962 963 PetscFunctionBegin; 964 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 965 PetscValidPointer(mem,4); 966 if (dm->workin) { 967 link = dm->workin; 968 dm->workin = dm->workin->next; 969 } else { 970 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 971 } 972 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 973 if (size*count > link->bytes) { 974 ierr = PetscFree(link->mem);CHKERRQ(ierr); 975 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 976 link->bytes = size*count; 977 } 978 link->next = dm->workout; 979 dm->workout = link; 980 *(void**)mem = link->mem; 981 PetscFunctionReturn(0); 982 } 983 984 #undef __FUNCT__ 985 #define __FUNCT__ "DMRestoreWorkArray" 986 /*@C 987 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 988 989 Not Collective 990 991 Input Parameters: 992 + dm - the DM object 993 . count - The minium size 994 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 995 996 Output Parameter: 997 . 
array - the work array 998 999 Level: developer 1000 1001 .seealso DMDestroy(), DMCreate() 1002 @*/ 1003 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1004 { 1005 DMWorkLink *p,link; 1006 1007 PetscFunctionBegin; 1008 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1009 PetscValidPointer(mem,4); 1010 for (p=&dm->workout; (link=*p); p=&link->next) { 1011 if (link->mem == *(void**)mem) { 1012 *p = link->next; 1013 link->next = dm->workin; 1014 dm->workin = link; 1015 *(void**)mem = NULL; 1016 PetscFunctionReturn(0); 1017 } 1018 } 1019 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1020 PetscFunctionReturn(0); 1021 } 1022 1023 #undef __FUNCT__ 1024 #define __FUNCT__ "DMSetNullSpaceConstructor" 1025 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1026 { 1027 PetscFunctionBegin; 1028 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1029 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1030 dm->nullspaceConstructors[field] = nullsp; 1031 PetscFunctionReturn(0); 1032 } 1033 1034 #undef __FUNCT__ 1035 #define __FUNCT__ "DMCreateFieldIS" 1036 /*@C 1037 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1038 1039 Not collective 1040 1041 Input Parameter: 1042 . dm - the DM object 1043 1044 Output Parameters: 1045 + numFields - The number of fields (or NULL if not requested) 1046 . fieldNames - The name for each field (or NULL if not requested) 1047 - fields - The global indices for each field (or NULL if not requested) 1048 1049 Level: intermediate 1050 1051 Notes: 1052 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1053 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1054 PetscFree(). 1055 1056 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1057 @*/ 1058 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1059 { 1060 PetscSection section, sectionGlobal; 1061 PetscErrorCode ierr; 1062 1063 PetscFunctionBegin; 1064 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1065 if (numFields) { 1066 PetscValidPointer(numFields,2); 1067 *numFields = 0; 1068 } 1069 if (fieldNames) { 1070 PetscValidPointer(fieldNames,3); 1071 *fieldNames = NULL; 1072 } 1073 if (fields) { 1074 PetscValidPointer(fields,4); 1075 *fields = NULL; 1076 } 1077 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1078 if (section) { 1079 PetscInt *fieldSizes, **fieldIndices; 1080 PetscInt nF, f, pStart, pEnd, p; 1081 1082 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1083 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1084 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1085 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1086 for (f = 0; f < nF; ++f) { 1087 fieldSizes[f] = 0; 1088 } 1089 for (p = pStart; p < pEnd; ++p) { 1090 PetscInt gdof; 1091 1092 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1093 if (gdof > 0) { 1094 for (f = 0; f < nF; ++f) { 1095 PetscInt fdof, fcdof; 1096 1097 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1098 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1099 fieldSizes[f] += fdof-fcdof; 1100 } 1101 } 1102 } 1103 for (f = 0; f < nF; ++f) { 1104 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1105 fieldSizes[f] = 0; 1106 } 1107 for (p = pStart; p < pEnd; ++p) { 1108 PetscInt gdof, goff; 
1109 1110 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1111 if (gdof > 0) { 1112 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1113 for (f = 0; f < nF; ++f) { 1114 PetscInt fdof, fcdof, fc; 1115 1116 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1117 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1118 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1119 fieldIndices[f][fieldSizes[f]] = goff++; 1120 } 1121 } 1122 } 1123 } 1124 if (numFields) *numFields = nF; 1125 if (fieldNames) { 1126 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1127 for (f = 0; f < nF; ++f) { 1128 const char *fieldName; 1129 1130 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1131 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1132 } 1133 } 1134 if (fields) { 1135 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1136 for (f = 0; f < nF; ++f) { 1137 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1138 } 1139 } 1140 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1141 } else if (dm->ops->createfieldis) { 1142 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1143 } 1144 PetscFunctionReturn(0); 1145 } 1146 1147 1148 #undef __FUNCT__ 1149 #define __FUNCT__ "DMCreateFieldDecomposition" 1150 /*@C 1151 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1152 corresponding to different fields: each IS contains the global indices of the dofs of the 1153 corresponding field. The optional list of DMs define the DM for each subproblem. 1154 Generalizes DMCreateFieldIS(). 1155 1156 Not collective 1157 1158 Input Parameter: 1159 . 
dm - the DM object 1160 1161 Output Parameters: 1162 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1163 . namelist - The name for each field (or NULL if not requested) 1164 . islist - The global indices for each field (or NULL if not requested) 1165 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1166 1167 Level: intermediate 1168 1169 Notes: 1170 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1171 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1172 and all of the arrays should be freed with PetscFree(). 1173 1174 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1175 @*/ 1176 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1177 { 1178 PetscErrorCode ierr; 1179 1180 PetscFunctionBegin; 1181 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1182 if (len) { 1183 PetscValidPointer(len,2); 1184 *len = 0; 1185 } 1186 if (namelist) { 1187 PetscValidPointer(namelist,3); 1188 *namelist = 0; 1189 } 1190 if (islist) { 1191 PetscValidPointer(islist,4); 1192 *islist = 0; 1193 } 1194 if (dmlist) { 1195 PetscValidPointer(dmlist,5); 1196 *dmlist = 0; 1197 } 1198 /* 1199 Is it a good idea to apply the following check across all impls? 1200 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1201 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1202 */ 1203 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1204 if (!dm->ops->createfielddecomposition) { 1205 PetscSection section; 1206 PetscInt numFields, f; 1207 1208 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1209 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1210 if (section && numFields && dm->ops->createsubdm) { 1211 *len = numFields; 1212 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1213 for (f = 0; f < numFields; ++f) { 1214 const char *fieldName; 1215 1216 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1217 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1218 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1219 } 1220 } else { 1221 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1222 /* By default there are no DMs associated with subproblems. */ 1223 if (dmlist) *dmlist = NULL; 1224 } 1225 } else { 1226 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1227 } 1228 PetscFunctionReturn(0); 1229 } 1230 1231 #undef __FUNCT__ 1232 #define __FUNCT__ "DMCreateSubDM" 1233 /*@C 1234 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1235 The fields are defined by DMCreateFieldIS(). 1236 1237 Not collective 1238 1239 Input Parameters: 1240 + dm - the DM object 1241 . numFields - number of fields in this subproblem 1242 - len - The number of subproblems in the decomposition (or NULL if not requested) 1243 1244 Output Parameters: 1245 . 
is - The global indices for the subproblem
- subdm - The DM for the subproblem

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  /* Pure dispatch: there is no generic fallback for subproblem extraction */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. 
outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; zero the requested ones up front */
  if (len) {PetscValidPointer(len,2); *len = 0;}
  if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;}
  if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;}
  if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;}
  if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* Each subdomain DM shares the parent's user context */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionScatters"
/*@C
  DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

  Not collective

  Input Parameters:
+ dm - the DM object
. n - the number of subdomain scatters
- subdms - the local subdomains

  Output Parameters:
+ n - the number of scatters returned
. iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
. oscat - scatter from global vector to overlapping global vector entries on subdomain
- gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

  Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
  of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets
  of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
  solution and residual data.

  Level: developer

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(subdms,3);
  /* Pure dispatch: only DM implementations defining createddscatters support this operation */
  if (dm->ops->createddscatters) {
    ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine"
/*@
  DMRefine - Refines a DM object

  Collective on DM

  Input Parameter:
+ dm - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
. 
dmf - the refined DM, or NULL 1387 1388 Note: If no refinement was done, the return value is NULL 1389 1390 Level: developer 1391 1392 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1393 @*/ 1394 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1395 { 1396 PetscErrorCode ierr; 1397 DMRefineHookLink link; 1398 1399 PetscFunctionBegin; 1400 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1401 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1402 if (*dmf) { 1403 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1404 1405 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1406 1407 (*dmf)->ctx = dm->ctx; 1408 (*dmf)->leveldown = dm->leveldown; 1409 (*dmf)->levelup = dm->levelup + 1; 1410 1411 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1412 for (link=dm->refinehook; link; link=link->next) { 1413 if (link->refinehook) { 1414 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1415 } 1416 } 1417 } 1418 PetscFunctionReturn(0); 1419 } 1420 1421 #undef __FUNCT__ 1422 #define __FUNCT__ "DMRefineHookAdd" 1423 /*@C 1424 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1425 1426 Logically Collective 1427 1428 Input Arguments: 1429 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1430 . refinehook - function to run when setting up a coarser level 1431 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1432 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1433 1434 Calling sequence of refinehook: 1435 $ refinehook(DM coarse,DM fine,void *ctx); 1436 1437 + coarse - coarse level DM 1438 . 
fine - fine level DM to interpolate problem to 1439 - ctx - optional user-defined function context 1440 1441 Calling sequence for interphook: 1442 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1443 1444 + coarse - coarse level DM 1445 . interp - matrix interpolating a coarse-level solution to the finer grid 1446 . fine - fine level DM to update 1447 - ctx - optional user-defined function context 1448 1449 Level: advanced 1450 1451 Notes: 1452 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1453 1454 If this function is called multiple times, the hooks will be run in the order they are added. 1455 1456 This function is currently not available from Fortran. 1457 1458 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1459 @*/ 1460 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1461 { 1462 PetscErrorCode ierr; 1463 DMRefineHookLink link,*p; 1464 1465 PetscFunctionBegin; 1466 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1467 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1468 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1469 link->refinehook = refinehook; 1470 link->interphook = interphook; 1471 link->ctx = ctx; 1472 link->next = NULL; 1473 *p = link; 1474 PetscFunctionReturn(0); 1475 } 1476 1477 #undef __FUNCT__ 1478 #define __FUNCT__ "DMInterpolate" 1479 /*@ 1480 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1481 1482 Collective if any hooks are 1483 1484 Input Arguments: 1485 + coarse - coarser DM to use as a base 1486 . 
interp - interpolation matrix, apply using MatInterpolate()
- fine - finer DM to update

  Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Run every interpolation hook registered on the fine DM, in registration order */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {
      ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
  DMGetRefineLevel - Gets the number of refinements that have generated this DM.

  Not Collective

  Input Parameter:
. dm - the DM object

  Output Parameter:
. level - number of refinements

  Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
  DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

  Logically Collective

  Input Arguments:
+ dm - the DM
. beginhook - function to run at the beginning of DMGlobalToLocalBegin()
. endhook - function to run after DMGlobalToLocalEnd() has completed
- ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

  Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+ dm - global DM
. g - global vector
. mode - mode
. 
l - local vector
- ctx - optional user-defined function context


  Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+ dm - global DM
- ctx - optional user-defined function context

  Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
  DMGlobalToLocalBegin - Begins updating local vectors from global vector

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. g - the global vector
. 
mode - INSERT_VALUES or ADD_VALUES
- l - the local vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks before any communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only supports insertion; additive modes are not implemented here */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
  DMGlobalToLocalEnd - Ends updating local vectors from global vector

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. g - the global vector
. 
mode - INSERT_VALUES or ADD_VALUES
- l - the local vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF sf;
  PetscErrorCode ierr;
  PetscScalar *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF path only supports insertion; additive modes are not implemented here */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* End-hooks run only after the data movement has completed */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
  DMLocalToGlobalBegin - updates global vectors from local vectors

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. l - the local vector
. mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
         base point.
- g - the global vector

  Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local
  array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
  global array to the final global array with VecAXPY().

  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op op;
    PetscScalar *lArray, *gArray;

    /* Translate the insert mode into the matching SF reduction operation */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the type-specific implementation */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
  DMLocalToGlobalEnd - updates global vectors from local vectors

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. l - the local vector
. 
mode - INSERT_VALUES or ADD_VALUES
- g - the global vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op op;
    PetscScalar *lArray, *gArray;

    /* Must select the same reduction operation as DMLocalToGlobalBegin() */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
  DMCoarsen - Coarsens a DM object

  Collective on DM

  Input Parameter:
+ dm - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
. 
dmc - the coarsened DM 1798 1799 Level: developer 1800 1801 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1802 1803 @*/ 1804 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1805 { 1806 PetscErrorCode ierr; 1807 DMCoarsenHookLink link; 1808 1809 PetscFunctionBegin; 1810 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1811 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1812 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1813 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1814 (*dmc)->ctx = dm->ctx; 1815 (*dmc)->levelup = dm->levelup; 1816 (*dmc)->leveldown = dm->leveldown + 1; 1817 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1818 for (link=dm->coarsenhook; link; link=link->next) { 1819 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1820 } 1821 PetscFunctionReturn(0); 1822 } 1823 1824 #undef __FUNCT__ 1825 #define __FUNCT__ "DMCoarsenHookAdd" 1826 /*@C 1827 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1828 1829 Logically Collective 1830 1831 Input Arguments: 1832 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1833 . coarsenhook - function to run when setting up a coarser level 1834 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1835 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1836 1837 Calling sequence of coarsenhook: 1838 $ coarsenhook(DM fine,DM coarse,void *ctx); 1839 1840 + fine - fine level DM 1841 . coarse - coarse level DM to restrict problem to 1842 - ctx - optional user-defined function context 1843 1844 Calling sequence for restricthook: 1845 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1846 1847 + fine - fine level DM 1848 . 
mrestrict - matrix restricting a fine-level solution to the coarse grid 1849 . rscale - scaling vector for restriction 1850 . inject - matrix restricting by injection 1851 . coarse - coarse level DM to update 1852 - ctx - optional user-defined function context 1853 1854 Level: advanced 1855 1856 Notes: 1857 This function is only needed if auxiliary data needs to be set up on coarse grids. 1858 1859 If this function is called multiple times, the hooks will be run in the order they are added. 1860 1861 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1862 extract the finest level information from its context (instead of from the SNES). 1863 1864 This function is currently not available from Fortran. 1865 1866 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1867 @*/ 1868 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1869 { 1870 PetscErrorCode ierr; 1871 DMCoarsenHookLink link,*p; 1872 1873 PetscFunctionBegin; 1874 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1875 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1876 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1877 link->coarsenhook = coarsenhook; 1878 link->restricthook = restricthook; 1879 link->ctx = ctx; 1880 link->next = NULL; 1881 *p = link; 1882 PetscFunctionReturn(0); 1883 } 1884 1885 #undef __FUNCT__ 1886 #define __FUNCT__ "DMRestrict" 1887 /*@ 1888 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1889 1890 Collective if any hooks are 1891 1892 Input Arguments: 1893 + fine - finer DM to use as a base 1894 . restrct - restriction matrix, apply using MatRestrict() 1895 . 
inject - injection matrix, also use MatRestrict()
- coarse - coarser DM to update

  Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run every restriction hook registered on the fine DM, in registration order */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@C
  DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid

  Logically Collective

  Input Arguments:
+ global - global DM
. ddhook - function to run to pass data to the decomposition DM upon its creation
. restricthook - function to run to update data on block solve (at the beginning of the block solve)
- ctx - [optional] user-defined context for provide data for the hooks (may be NULL)


  Calling sequence for ddhook:
$    ddhook(DM global,DM block,void *ctx)

+ global - global DM
. block - block DM
- ctx - optional user-defined function context

  Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+ global - global DM
. out - scatter to the outer (with ghost and overlap points) block vector
. in - scatter to block vector values only owned locally
. block - block DM
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be set up on subdomain DMs.

  If this function is called multiple times, the hooks will be run in the order they are added.
1952 1953 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1954 extract the global information from its context (instead of from the SNES). 1955 1956 This function is currently not available from Fortran. 1957 1958 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1959 @*/ 1960 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1961 { 1962 PetscErrorCode ierr; 1963 DMSubDomainHookLink link,*p; 1964 1965 PetscFunctionBegin; 1966 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1967 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1968 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 1969 link->restricthook = restricthook; 1970 link->ddhook = ddhook; 1971 link->ctx = ctx; 1972 link->next = NULL; 1973 *p = link; 1974 PetscFunctionReturn(0); 1975 } 1976 1977 #undef __FUNCT__ 1978 #define __FUNCT__ "DMSubDomainRestrict" 1979 /*@ 1980 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 1981 1982 Collective if any hooks are 1983 1984 Input Arguments: 1985 + fine - finer DM to use as a base 1986 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 1987 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 1988 - coarse - coarer DM to update 1989 1990 Level: developer 1991 1992 .seealso: DMCoarsenHookAdd(), MatRestrict() 1993 @*/ 1994 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 1995 { 1996 PetscErrorCode ierr; 1997 DMSubDomainHookLink link; 1998 1999 PetscFunctionBegin; 2000 for (link=global->subdomainhook; link; link=link->next) { 2001 if (link->restricthook) { 2002 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2003 } 2004 } 2005 PetscFunctionReturn(0); 2006 } 2007 2008 #undef __FUNCT__ 2009 #define __FUNCT__ "DMGetCoarsenLevel" 2010 /*@ 2011 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2012 2013 Not Collective 2014 2015 Input Parameter: 2016 . dm - the DM object 2017 2018 Output Parameter: 2019 . level - number of coarsenings 2020 2021 Level: developer 2022 2023 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2024 2025 @*/ 2026 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2027 { 2028 PetscFunctionBegin; 2029 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2030 *level = dm->leveldown; 2031 PetscFunctionReturn(0); 2032 } 2033 2034 2035 2036 #undef __FUNCT__ 2037 #define __FUNCT__ "DMRefineHierarchy" 2038 /*@C 2039 DMRefineHierarchy - Refines a DM object, all levels at once 2040 2041 Collective on DM 2042 2043 Input Parameter: 2044 + dm - the DM object 2045 - nlevels - the number of levels of refinement 2046 2047 Output Parameter: 2048 . 
dmf - the refined DM hierarchy 2049 2050 Level: developer 2051 2052 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2053 2054 @*/ 2055 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2056 { 2057 PetscErrorCode ierr; 2058 2059 PetscFunctionBegin; 2060 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2061 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2062 if (nlevels == 0) PetscFunctionReturn(0); 2063 if (dm->ops->refinehierarchy) { 2064 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2065 } else if (dm->ops->refine) { 2066 PetscInt i; 2067 2068 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2069 for (i=1; i<nlevels; i++) { 2070 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2071 } 2072 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2073 PetscFunctionReturn(0); 2074 } 2075 2076 #undef __FUNCT__ 2077 #define __FUNCT__ "DMCoarsenHierarchy" 2078 /*@C 2079 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2080 2081 Collective on DM 2082 2083 Input Parameter: 2084 + dm - the DM object 2085 - nlevels - the number of levels of coarsening 2086 2087 Output Parameter: 2088 . 
dmc - the coarsened DM hierarchy 2089 2090 Level: developer 2091 2092 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2093 2094 @*/ 2095 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2096 { 2097 PetscErrorCode ierr; 2098 2099 PetscFunctionBegin; 2100 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2101 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2102 if (nlevels == 0) PetscFunctionReturn(0); 2103 PetscValidPointer(dmc,3); 2104 if (dm->ops->coarsenhierarchy) { 2105 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2106 } else if (dm->ops->coarsen) { 2107 PetscInt i; 2108 2109 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2110 for (i=1; i<nlevels; i++) { 2111 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2112 } 2113 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2114 PetscFunctionReturn(0); 2115 } 2116 2117 #undef __FUNCT__ 2118 #define __FUNCT__ "DMCreateAggregates" 2119 /*@ 2120 DMCreateAggregates - Gets the aggregates that map between 2121 grids associated with two DMs. 2122 2123 Collective on DM 2124 2125 Input Parameters: 2126 + dmc - the coarse grid DM 2127 - dmf - the fine grid DM 2128 2129 Output Parameters: 2130 . 
rest - the restriction matrix (transpose of the projection matrix) 2131 2132 Level: intermediate 2133 2134 .keywords: interpolation, restriction, multigrid 2135 2136 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2137 @*/ 2138 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2139 { 2140 PetscErrorCode ierr; 2141 2142 PetscFunctionBegin; 2143 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2144 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2145 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2146 PetscFunctionReturn(0); 2147 } 2148 2149 #undef __FUNCT__ 2150 #define __FUNCT__ "DMSetApplicationContextDestroy" 2151 /*@C 2152 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2153 2154 Not Collective 2155 2156 Input Parameters: 2157 + dm - the DM object 2158 - destroy - the destroy function 2159 2160 Level: intermediate 2161 2162 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2163 2164 @*/ 2165 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2166 { 2167 PetscFunctionBegin; 2168 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2169 dm->ctxdestroy = destroy; 2170 PetscFunctionReturn(0); 2171 } 2172 2173 #undef __FUNCT__ 2174 #define __FUNCT__ "DMSetApplicationContext" 2175 /*@ 2176 DMSetApplicationContext - Set a user context into a DM object 2177 2178 Not Collective 2179 2180 Input Parameters: 2181 + dm - the DM object 2182 - ctx - the user context 2183 2184 Level: intermediate 2185 2186 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2187 2188 @*/ 2189 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2190 { 2191 PetscFunctionBegin; 2192 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2193 dm->ctx = ctx; 2194 PetscFunctionReturn(0); 2195 } 2196 
2197 #undef __FUNCT__ 2198 #define __FUNCT__ "DMGetApplicationContext" 2199 /*@ 2200 DMGetApplicationContext - Gets a user context from a DM object 2201 2202 Not Collective 2203 2204 Input Parameter: 2205 . dm - the DM object 2206 2207 Output Parameter: 2208 . ctx - the user context 2209 2210 Level: intermediate 2211 2212 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2213 2214 @*/ 2215 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2216 { 2217 PetscFunctionBegin; 2218 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2219 *(void**)ctx = dm->ctx; 2220 PetscFunctionReturn(0); 2221 } 2222 2223 #undef __FUNCT__ 2224 #define __FUNCT__ "DMSetVariableBounds" 2225 /*@C 2226 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2227 2228 Logically Collective on DM 2229 2230 Input Parameter: 2231 + dm - the DM object 2232 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2233 2234 Level: intermediate 2235 2236 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2237 DMSetJacobian() 2238 2239 @*/ 2240 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2241 { 2242 PetscFunctionBegin; 2243 dm->ops->computevariablebounds = f; 2244 PetscFunctionReturn(0); 2245 } 2246 2247 #undef __FUNCT__ 2248 #define __FUNCT__ "DMHasVariableBounds" 2249 /*@ 2250 DMHasVariableBounds - does the DM object have a variable bounds function? 2251 2252 Not Collective 2253 2254 Input Parameter: 2255 . dm - the DM object to destroy 2256 2257 Output Parameter: 2258 . 
flg - PETSC_TRUE if the variable bounds function exists 2259 2260 Level: developer 2261 2262 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2263 2264 @*/ 2265 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2266 { 2267 PetscFunctionBegin; 2268 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2269 PetscFunctionReturn(0); 2270 } 2271 2272 #undef __FUNCT__ 2273 #define __FUNCT__ "DMComputeVariableBounds" 2274 /*@C 2275 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2276 2277 Logically Collective on DM 2278 2279 Input Parameters: 2280 + dm - the DM object to destroy 2281 - x - current solution at which the bounds are computed 2282 2283 Output parameters: 2284 + xl - lower bound 2285 - xu - upper bound 2286 2287 Level: intermediate 2288 2289 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2290 2291 @*/ 2292 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2293 { 2294 PetscErrorCode ierr; 2295 2296 PetscFunctionBegin; 2297 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2298 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2299 if (dm->ops->computevariablebounds) { 2300 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2301 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2302 PetscFunctionReturn(0); 2303 } 2304 2305 #undef __FUNCT__ 2306 #define __FUNCT__ "DMHasColoring" 2307 /*@ 2308 DMHasColoring - does the DM object have a method of providing a coloring? 2309 2310 Not Collective 2311 2312 Input Parameter: 2313 . dm - the DM object 2314 2315 Output Parameter: 2316 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2317 2318 Level: developer 2319 2320 .seealso DMHasFunction(), DMCreateColoring() 2321 2322 @*/ 2323 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2324 { 2325 PetscFunctionBegin; 2326 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2327 PetscFunctionReturn(0); 2328 } 2329 2330 #undef __FUNCT__ 2331 #define __FUNCT__ "DMSetVec" 2332 /*@C 2333 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2334 2335 Collective on DM 2336 2337 Input Parameter: 2338 + dm - the DM object 2339 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2340 2341 Level: developer 2342 2343 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2344 2345 @*/ 2346 PetscErrorCode DMSetVec(DM dm,Vec x) 2347 { 2348 PetscErrorCode ierr; 2349 2350 PetscFunctionBegin; 2351 if (x) { 2352 if (!dm->x) { 2353 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2354 } 2355 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2356 } else if (dm->x) { 2357 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2358 } 2359 PetscFunctionReturn(0); 2360 } 2361 2362 PetscFunctionList DMList = NULL; 2363 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2364 2365 #undef __FUNCT__ 2366 #define __FUNCT__ "DMSetType" 2367 /*@C 2368 DMSetType - Builds a DM, for a particular DM implementation. 2369 2370 Collective on DM 2371 2372 Input Parameters: 2373 + dm - The DM object 2374 - method - The name of the DM type 2375 2376 Options Database Key: 2377 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2378 2379 Notes: 2380 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2381 2382 Level: intermediate 2383 2384 .keywords: DM, set, type 2385 .seealso: DMGetType(), DMCreate() 2386 @*/ 2387 PetscErrorCode DMSetType(DM dm, DMType method) 2388 { 2389 PetscErrorCode (*r)(DM); 2390 PetscBool match; 2391 PetscErrorCode ierr; 2392 2393 PetscFunctionBegin; 2394 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2395 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2396 if (match) PetscFunctionReturn(0); 2397 2398 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2399 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2400 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2401 2402 if (dm->ops->destroy) { 2403 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2404 dm->ops->destroy = NULL; 2405 } 2406 ierr = (*r)(dm);CHKERRQ(ierr); 2407 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2408 PetscFunctionReturn(0); 2409 } 2410 2411 #undef __FUNCT__ 2412 #define __FUNCT__ "DMGetType" 2413 /*@C 2414 DMGetType - Gets the DM type name (as a string) from the DM. 2415 2416 Not Collective 2417 2418 Input Parameter: 2419 . dm - The DM 2420 2421 Output Parameter: 2422 . type - The DM type name 2423 2424 Level: intermediate 2425 2426 .keywords: DM, get, type, name 2427 .seealso: DMSetType(), DMCreate() 2428 @*/ 2429 PetscErrorCode DMGetType(DM dm, DMType *type) 2430 { 2431 PetscErrorCode ierr; 2432 2433 PetscFunctionBegin; 2434 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2435 PetscValidCharPointer(type,2); 2436 if (!DMRegisterAllCalled) { 2437 ierr = DMRegisterAll();CHKERRQ(ierr); 2438 } 2439 *type = ((PetscObject)dm)->type_name; 2440 PetscFunctionReturn(0); 2441 } 2442 2443 #undef __FUNCT__ 2444 #define __FUNCT__ "DMConvert" 2445 /*@C 2446 DMConvert - Converts a DM to another DM, either of the same or different type. 
2447 2448 Collective on DM 2449 2450 Input Parameters: 2451 + dm - the DM 2452 - newtype - new DM type (use "same" for the same type) 2453 2454 Output Parameter: 2455 . M - pointer to new DM 2456 2457 Notes: 2458 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2459 the MPI communicator of the generated DM is always the same as the communicator 2460 of the input DM. 2461 2462 Level: intermediate 2463 2464 .seealso: DMCreate() 2465 @*/ 2466 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2467 { 2468 DM B; 2469 char convname[256]; 2470 PetscBool sametype, issame; 2471 PetscErrorCode ierr; 2472 2473 PetscFunctionBegin; 2474 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2475 PetscValidType(dm,1); 2476 PetscValidPointer(M,3); 2477 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2478 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2479 { 2480 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2481 2482 /* 2483 Order of precedence: 2484 1) See if a specialized converter is known to the current DM. 2485 2) See if a specialized converter is known to the desired DM class. 2486 3) See if a good general converter is registered for the desired class 2487 4) See if a good general converter is known for the current matrix. 2488 5) Use a really basic converter. 2489 */ 2490 2491 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2492 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2493 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2494 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2495 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2496 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2497 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2498 if (conv) goto foundconv; 2499 2500 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2501 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2502 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2503 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2504 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2505 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2506 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2507 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2508 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2509 if (conv) { 2510 ierr = DMDestroy(&B);CHKERRQ(ierr); 2511 goto foundconv; 2512 } 2513 2514 #if 0 2515 /* 3) See if a good general converter is registered for the desired class */ 2516 conv = B->ops->convertfrom; 2517 ierr = DMDestroy(&B);CHKERRQ(ierr); 2518 if (conv) goto foundconv; 2519 2520 /* 4) See if a good general converter is known for the current matrix */ 2521 if (dm->ops->convert) { 2522 conv = dm->ops->convert; 2523 } 2524 if (conv) goto foundconv; 2525 #endif 2526 2527 /* 5) Use a really basic converter. 
*/ 2528 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2529 2530 foundconv: 2531 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2532 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2533 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2534 } 2535 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2536 PetscFunctionReturn(0); 2537 } 2538 2539 /*--------------------------------------------------------------------------------------------------------------------*/ 2540 2541 #undef __FUNCT__ 2542 #define __FUNCT__ "DMRegister" 2543 /*@C 2544 DMRegister - Adds a new DM component implementation 2545 2546 Not Collective 2547 2548 Input Parameters: 2549 + name - The name of a new user-defined creation routine 2550 - create_func - The creation routine itself 2551 2552 Notes: 2553 DMRegister() may be called multiple times to add several user-defined DMs 2554 2555 2556 Sample usage: 2557 .vb 2558 DMRegister("my_da", MyDMCreate); 2559 .ve 2560 2561 Then, your DM type can be chosen with the procedural interface via 2562 .vb 2563 DMCreate(MPI_Comm, DM *); 2564 DMSetType(DM,"my_da"); 2565 .ve 2566 or at runtime via the option 2567 .vb 2568 -da_type my_da 2569 .ve 2570 2571 Level: advanced 2572 2573 .keywords: DM, register 2574 .seealso: DMRegisterAll(), DMRegisterDestroy() 2575 2576 @*/ 2577 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2578 { 2579 PetscErrorCode ierr; 2580 2581 PetscFunctionBegin; 2582 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2583 PetscFunctionReturn(0); 2584 } 2585 2586 #undef __FUNCT__ 2587 #define __FUNCT__ "DMLoad" 2588 /*@C 2589 DMLoad - Loads a DM that has been stored in binary with DMView(). 
2590 2591 Collective on PetscViewer 2592 2593 Input Parameters: 2594 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2595 some related function before a call to DMLoad(). 2596 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2597 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2598 2599 Level: intermediate 2600 2601 Notes: 2602 The type is determined by the data in the file, any type set into the DM before this call is ignored. 2603 2604 Notes for advanced users: 2605 Most users should not need to know the details of the binary storage 2606 format, since DMLoad() and DMView() completely hide these details. 2607 But for anyone who's interested, the standard binary matrix storage 2608 format is 2609 .vb 2610 has not yet been determined 2611 .ve 2612 2613 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2614 @*/ 2615 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2616 { 2617 PetscErrorCode ierr; 2618 PetscBool isbinary; 2619 PetscInt classid; 2620 char type[256]; 2621 2622 PetscFunctionBegin; 2623 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2624 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2625 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 2626 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 2627 2628 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 2629 if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file"); 2630 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 2631 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 2632 if (newdm->ops->load) { 2633 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2634 } 2635 PetscFunctionReturn(0); 2636 } 2637 2638 /******************************** FEM Support **********************************/ 2639 2640 
#undef __FUNCT__ 2641 #define __FUNCT__ "DMPrintCellVector" 2642 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2643 { 2644 PetscInt f; 2645 PetscErrorCode ierr; 2646 2647 PetscFunctionBegin; 2648 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2649 for (f = 0; f < len; ++f) { 2650 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2651 } 2652 PetscFunctionReturn(0); 2653 } 2654 2655 #undef __FUNCT__ 2656 #define __FUNCT__ "DMPrintCellMatrix" 2657 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2658 { 2659 PetscInt f, g; 2660 PetscErrorCode ierr; 2661 2662 PetscFunctionBegin; 2663 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2664 for (f = 0; f < rows; ++f) { 2665 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2666 for (g = 0; g < cols; ++g) { 2667 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2668 } 2669 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2670 } 2671 PetscFunctionReturn(0); 2672 } 2673 2674 #undef __FUNCT__ 2675 #define __FUNCT__ "DMGetDefaultSection" 2676 /*@ 2677 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2678 2679 Input Parameter: 2680 . dm - The DM 2681 2682 Output Parameter: 2683 . section - The PetscSection 2684 2685 Level: intermediate 2686 2687 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2688 2689 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2690 @*/ 2691 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2692 { 2693 PetscFunctionBegin; 2694 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2695 PetscValidPointer(section, 2); 2696 *section = dm->defaultSection; 2697 PetscFunctionReturn(0); 2698 } 2699 2700 #undef __FUNCT__ 2701 #define __FUNCT__ "DMSetDefaultSection" 2702 /*@ 2703 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2704 2705 Input Parameters: 2706 + dm - The DM 2707 - section - The PetscSection 2708 2709 Level: intermediate 2710 2711 Note: Any existing Section will be destroyed 2712 2713 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2714 @*/ 2715 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) 2716 { 2717 PetscInt numFields; 2718 PetscInt f; 2719 PetscErrorCode ierr; 2720 2721 PetscFunctionBegin; 2722 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2723 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2724 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2725 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2726 dm->defaultSection = section; 2727 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 2728 if (numFields) { 2729 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 2730 for (f = 0; f < numFields; ++f) { 2731 const char *name; 2732 2733 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 2734 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 2735 } 2736 } 2737 /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). 
*/ 2738 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2739 PetscFunctionReturn(0); 2740 } 2741 2742 #undef __FUNCT__ 2743 #define __FUNCT__ "DMGetDefaultGlobalSection" 2744 /*@ 2745 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2746 2747 Collective on DM 2748 2749 Input Parameter: 2750 . dm - The DM 2751 2752 Output Parameter: 2753 . section - The PetscSection 2754 2755 Level: intermediate 2756 2757 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2758 2759 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2760 @*/ 2761 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2762 { 2763 PetscErrorCode ierr; 2764 2765 PetscFunctionBegin; 2766 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2767 PetscValidPointer(section, 2); 2768 if (!dm->defaultGlobalSection) { 2769 if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 2770 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2771 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2772 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr); 2773 } 2774 *section = dm->defaultGlobalSection; 2775 PetscFunctionReturn(0); 2776 } 2777 2778 #undef __FUNCT__ 2779 #define __FUNCT__ "DMSetDefaultGlobalSection" 2780 /*@ 2781 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2782 2783 Input Parameters: 2784 + dm - The DM 2785 - section - The PetscSection, or NULL 2786 2787 Level: intermediate 2788 2789 Note: Any existing Section will be destroyed 2790 2791 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2792 @*/ 2793 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2794 { 2795 PetscErrorCode ierr; 2796 2797 PetscFunctionBegin; 2798 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2799 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2800 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2801 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2802 dm->defaultGlobalSection = section; 2803 PetscFunctionReturn(0); 2804 } 2805 2806 #undef __FUNCT__ 2807 #define __FUNCT__ "DMGetDefaultSF" 2808 /*@ 2809 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2810 it is created from the default PetscSection layouts in the DM. 2811 2812 Input Parameter: 2813 . dm - The DM 2814 2815 Output Parameter: 2816 . sf - The PetscSF 2817 2818 Level: intermediate 2819 2820 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2821 2822 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2823 @*/ 2824 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2825 { 2826 PetscInt nroots; 2827 PetscErrorCode ierr; 2828 2829 PetscFunctionBegin; 2830 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2831 PetscValidPointer(sf, 2); 2832 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2833 if (nroots < 0) { 2834 PetscSection section, gSection; 2835 2836 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2837 if (section) { 2838 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2839 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2840 } else { 2841 *sf = NULL; 2842 PetscFunctionReturn(0); 2843 } 2844 } 2845 *sf = dm->defaultSF; 2846 PetscFunctionReturn(0); 2847 } 2848 2849 #undef __FUNCT__ 2850 #define __FUNCT__ "DMSetDefaultSF" 2851 /*@ 2852 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 2853 2854 Input Parameters: 2855 + dm - The DM 2856 - sf - The PetscSF 2857 2858 Level: intermediate 2859 2860 Note: Any previous SF is destroyed 2861 2862 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 2863 @*/ 2864 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 2865 { 2866 PetscErrorCode ierr; 2867 2868 PetscFunctionBegin; 2869 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2870 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 2871 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 2872 dm->defaultSF = sf; 2873 PetscFunctionReturn(0); 2874 } 2875 2876 #undef __FUNCT__ 2877 #define __FUNCT__ "DMCreateDefaultSF" 2878 /*@C 2879 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 2880 describing the data layout. 2881 2882 Input Parameters: 2883 + dm - The DM 2884 . 
localSection - PetscSection describing the local data layout 2885 - globalSection - PetscSection describing the global data layout 2886 2887 Level: intermediate 2888 2889 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 2890 @*/ 2891 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 2892 { 2893 MPI_Comm comm; 2894 PetscLayout layout; 2895 const PetscInt *ranges; 2896 PetscInt *local; 2897 PetscSFNode *remote; 2898 PetscInt pStart, pEnd, p, nroots, nleaves, l; 2899 PetscMPIInt size, rank; 2900 PetscErrorCode ierr; 2901 2902 PetscFunctionBegin; 2903 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 2904 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2905 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2906 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2907 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 2908 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 2909 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 2910 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 2911 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 2912 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 2913 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 2914 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 2915 PetscInt gdof, gcdof; 2916 2917 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2918 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2919 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 2920 } 2921 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 2922 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 2923 for (p = pStart, l = 0; p < pEnd; ++p) { 2924 const PetscInt *cind; 2925 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 2926 2927 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 2928 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 2929 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 2930 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 2931 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2932 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2933 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 2934 if (!gdof) continue; /* Censored point */ 2935 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 2936 if (gsize != dof-cdof) { 2937 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 2938 cdof = 0; /* Ignore constraints */ 2939 } 2940 for (d = 0, c = 0; d < dof; ++d) { 2941 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 2942 local[l+d-c] = off+d; 2943 } 2944 if (gdof < 0) { 2945 for (d = 0; d < gsize; ++d, ++l) { 2946 PetscInt offset = -(goff+1) + d, r; 2947 2948 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 2949 if (r < 0) r = -(r+2); 2950 remote[l].rank = r; 2951 remote[l].index = offset - ranges[r]; 2952 } 2953 } else { 2954 for (d = 0; d < gsize; ++d, ++l) { 2955 remote[l].rank = rank; 2956 remote[l].index = goff+d - ranges[rank]; 2957 } 2958 } 2959 } 2960 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 2961 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 2962 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 2963 PetscFunctionReturn(0); 2964 } 2965 2966 #undef __FUNCT__ 2967 #define __FUNCT__ "DMGetPointSF" 2968 /*@ 2969 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 2970 2971 Input Parameter: 2972 . dm - The DM 2973 2974 Output Parameter: 2975 . sf - The PetscSF 2976 2977 Level: intermediate 2978 2979 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 2980 2981 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 2982 @*/ 2983 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 2984 { 2985 PetscFunctionBegin; 2986 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2987 PetscValidPointer(sf, 2); 2988 *sf = dm->sf; 2989 PetscFunctionReturn(0); 2990 } 2991 2992 #undef __FUNCT__ 2993 #define __FUNCT__ "DMSetPointSF" 2994 /*@ 2995 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 2996 2997 Input Parameters: 2998 + dm - The DM 2999 - sf - The PetscSF 3000 3001 Level: intermediate 3002 3003 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3004 @*/ 3005 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3006 { 3007 PetscErrorCode ierr; 3008 3009 PetscFunctionBegin; 3010 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3011 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3012 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3013 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3014 dm->sf = sf; 3015 PetscFunctionReturn(0); 3016 } 3017 3018 #undef __FUNCT__ 3019 #define __FUNCT__ "DMGetNumFields" 3020 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3021 { 3022 PetscFunctionBegin; 3023 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3024 PetscValidPointer(numFields, 2); 3025 *numFields = dm->numFields; 3026 PetscFunctionReturn(0); 3027 } 3028 3029 #undef __FUNCT__ 3030 #define __FUNCT__ "DMSetNumFields" 3031 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3032 { 3033 PetscInt f; 3034 PetscErrorCode ierr; 3035 3036 PetscFunctionBegin; 3037 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3038 for (f = 0; f < dm->numFields; ++f) { 3039 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3040 } 3041 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3042 dm->numFields = numFields; 3043 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3044 for (f = 0; f < dm->numFields; ++f) { 3045 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3046 } 3047 PetscFunctionReturn(0); 3048 } 3049 3050 #undef __FUNCT__ 3051 #define __FUNCT__ "DMGetField" 3052 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3053 { 3054 PetscFunctionBegin; 3055 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3056 PetscValidPointer(field, 2); 3057 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3058 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3059 *field = dm->fields[f]; 3060 PetscFunctionReturn(0); 3061 } 3062 3063 #undef __FUNCT__ 3064 #define __FUNCT__ "DMSetCoordinates" 3065 /*@ 3066 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3067 3068 Collective on DM 3069 3070 Input Parameters: 3071 + dm - the DM 3072 - c - coordinate vector 3073 3074 Note: 3075 The coordinates do include those for ghost points, which are in the local vector 3076 3077 Level: intermediate 3078 3079 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3080 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3081 @*/ 3082 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3083 { 3084 PetscErrorCode ierr; 3085 3086 PetscFunctionBegin; 3087 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3088 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3089 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3090 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3091 dm->coordinates = c; 3092 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3093 PetscFunctionReturn(0); 3094 } 3095 3096 #undef __FUNCT__ 3097 #define __FUNCT__ "DMSetCoordinatesLocal" 3098 /*@ 3099 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3100 3101 Collective on DM 3102 3103 Input Parameters: 3104 + dm - the DM 3105 - c - coordinate vector 3106 3107 Note: 3108 The coordinates of ghost points can be set using DMSetCoordinates() 3109 followed by DMGetCoordinatesLocal(). This is intended to enable the 3110 setting of ghost coordinates outside of the domain. 
3111 3112 Level: intermediate 3113 3114 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3115 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3116 @*/ 3117 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3118 { 3119 PetscErrorCode ierr; 3120 3121 PetscFunctionBegin; 3122 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3123 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3124 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3125 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3126 3127 dm->coordinatesLocal = c; 3128 3129 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3130 PetscFunctionReturn(0); 3131 } 3132 3133 #undef __FUNCT__ 3134 #define __FUNCT__ "DMGetCoordinates" 3135 /*@ 3136 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3137 3138 Not Collective 3139 3140 Input Parameter: 3141 . dm - the DM 3142 3143 Output Parameter: 3144 . c - global coordinate vector 3145 3146 Note: 3147 This is a borrowed reference, so the user should NOT destroy this vector 3148 3149 Each process has only the local coordinates (does NOT have the ghost coordinates). 3150 3151 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3152 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3153 3154 Level: intermediate 3155 3156 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3157 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3158 @*/ 3159 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3160 { 3161 PetscErrorCode ierr; 3162 3163 PetscFunctionBegin; 3164 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3165 PetscValidPointer(c,2); 3166 if (!dm->coordinates && dm->coordinatesLocal) { 3167 DM cdm = NULL; 3168 3169 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3170 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3171 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3172 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3173 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3174 } 3175 *c = dm->coordinates; 3176 PetscFunctionReturn(0); 3177 } 3178 3179 #undef __FUNCT__ 3180 #define __FUNCT__ "DMGetCoordinatesLocal" 3181 /*@ 3182 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3183 3184 Collective on DM 3185 3186 Input Parameter: 3187 . dm - the DM 3188 3189 Output Parameter: 3190 . c - coordinate vector 3191 3192 Note: 3193 This is a borrowed reference, so the user should NOT destroy this vector 3194 3195 Each process has the local and ghost coordinates 3196 3197 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3198 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3199 3200 Level: intermediate 3201 3202 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3203 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3204 @*/ 3205 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3206 { 3207 PetscErrorCode ierr; 3208 3209 PetscFunctionBegin; 3210 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3211 PetscValidPointer(c,2); 3212 if (!dm->coordinatesLocal && dm->coordinates) { 3213 DM cdm = NULL; 3214 3215 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3216 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3217 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3218 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3219 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3220 } 3221 *c = dm->coordinatesLocal; 3222 PetscFunctionReturn(0); 3223 } 3224 3225 #undef __FUNCT__ 3226 #define __FUNCT__ "DMGetCoordinateDM" 3227 /*@ 3228 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3229 3230 Collective on DM 3231 3232 Input Parameter: 3233 . dm - the DM 3234 3235 Output Parameter: 3236 . 
cdm - coordinate DM 3237 3238 Level: intermediate 3239 3240 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3241 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3242 @*/ 3243 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3244 { 3245 PetscErrorCode ierr; 3246 3247 PetscFunctionBegin; 3248 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3249 PetscValidPointer(cdm,2); 3250 if (!dm->coordinateDM) { 3251 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3252 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3253 } 3254 *cdm = dm->coordinateDM; 3255 PetscFunctionReturn(0); 3256 } 3257 3258 #undef __FUNCT__ 3259 #define __FUNCT__ "DMLocatePoints" 3260 /*@ 3261 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3262 3263 Not collective 3264 3265 Input Parameters: 3266 + dm - The DM 3267 - v - The Vec of points 3268 3269 Output Parameter: 3270 . cells - The local cell numbers for cells which contain the points 3271 3272 Level: developer 3273 3274 .keywords: point location, mesh 3275 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3276 @*/ 3277 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3278 { 3279 PetscErrorCode ierr; 3280 3281 PetscFunctionBegin; 3282 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3283 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3284 PetscValidPointer(cells,3); 3285 if (dm->ops->locatepoints) { 3286 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3287 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3288 PetscFunctionReturn(0); 3289 } 3290