1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 Processes command line options to determine if/how a dm 11 is to be viewed. 12 13 */ 14 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 15 { 16 PetscErrorCode ierr; 17 PetscBool flg; 18 PetscViewer viewer; 19 PetscViewerFormat format; 20 21 PetscFunctionBegin; 22 if (prefix) { 23 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 24 } else { 25 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 26 } 27 if (flg) { 28 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 29 ierr = DMView(dm,viewer);CHKERRQ(ierr); 30 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 31 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 32 } 33 PetscFunctionReturn(0); 34 } 35 36 37 #undef __FUNCT__ 38 #define __FUNCT__ "DMCreate" 39 /*@ 40 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 41 42 If you never call DMSetType() it will generate an 43 error when you try to use the vector. 44 45 Collective on MPI_Comm 46 47 Input Parameter: 48 . comm - The communicator for the DM object 49 50 Output Parameter: 51 . 
dm - The DM object 52 53 Level: beginner 54 55 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 56 @*/ 57 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 58 { 59 DM v; 60 PetscErrorCode ierr; 61 62 PetscFunctionBegin; 63 PetscValidPointer(dm,2); 64 *dm = NULL; 65 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 66 ierr = VecInitializePackage();CHKERRQ(ierr); 67 ierr = MatInitializePackage();CHKERRQ(ierr); 68 ierr = DMInitializePackage();CHKERRQ(ierr); 69 #endif 70 71 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 72 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 73 74 75 v->ltogmap = NULL; 76 v->ltogmapb = NULL; 77 v->bs = 1; 78 v->coloringtype = IS_COLORING_GLOBAL; 79 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 80 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 81 v->defaultSection = NULL; 82 v->defaultGlobalSection = NULL; 83 { 84 PetscInt i; 85 for (i = 0; i < 10; ++i) { 86 v->nullspaceConstructors[i] = NULL; 87 } 88 } 89 v->numFields = 0; 90 v->fields = NULL; 91 92 *dm = v; 93 PetscFunctionReturn(0); 94 } 95 96 #undef __FUNCT__ 97 #define __FUNCT__ "DMSetVecType" 98 /*@C 99 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 100 101 Logically Collective on DMDA 102 103 Input Parameter: 104 + da - initial distributed array 105 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 106 107 Options Database: 108 . 
-dm_vec_type ctype 109 110 Level: intermediate 111 112 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 113 @*/ 114 PetscErrorCode DMSetVecType(DM da,VecType ctype) 115 { 116 PetscErrorCode ierr; 117 118 PetscFunctionBegin; 119 PetscValidHeaderSpecific(da,DM_CLASSID,1); 120 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 121 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 122 PetscFunctionReturn(0); 123 } 124 125 #undef __FUNCT__ 126 #define __FUNCT__ "VecGetDM" 127 /*@ 128 VecGetDM - Gets the DM defining the data layout of the vector 129 130 Not collective 131 132 Input Parameter: 133 . v - The Vec 134 135 Output Parameter: 136 . dm - The DM 137 138 Level: intermediate 139 140 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 141 @*/ 142 PetscErrorCode VecGetDM(Vec v, DM *dm) 143 { 144 PetscErrorCode ierr; 145 146 PetscFunctionBegin; 147 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 148 PetscValidPointer(dm,2); 149 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 150 PetscFunctionReturn(0); 151 } 152 153 #undef __FUNCT__ 154 #define __FUNCT__ "VecSetDM" 155 /*@ 156 VecSetDM - Sets the DM defining the data layout of the vector 157 158 Not collective 159 160 Input Parameters: 161 + v - The Vec 162 - dm - The DM 163 164 Level: intermediate 165 166 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 167 @*/ 168 PetscErrorCode VecSetDM(Vec v, DM dm) 169 { 170 PetscErrorCode ierr; 171 172 PetscFunctionBegin; 173 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 174 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 175 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 176 PetscFunctionReturn(0); 177 } 178 179 #undef __FUNCT__ 180 #define __FUNCT__ "DMSetMatType" 181 /*@C 182 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 183 184 Logically Collective on DM 185 
186 Input Parameter: 187 + dm - the DM context 188 . ctype - the matrix type 189 190 Options Database: 191 . -dm_mat_type ctype 192 193 Level: intermediate 194 195 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 196 @*/ 197 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 198 { 199 PetscErrorCode ierr; 200 201 PetscFunctionBegin; 202 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 203 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 204 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 205 PetscFunctionReturn(0); 206 } 207 208 #undef __FUNCT__ 209 #define __FUNCT__ "MatGetDM" 210 /*@ 211 MatGetDM - Gets the DM defining the data layout of the matrix 212 213 Not collective 214 215 Input Parameter: 216 . A - The Mat 217 218 Output Parameter: 219 . dm - The DM 220 221 Level: intermediate 222 223 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 224 @*/ 225 PetscErrorCode MatGetDM(Mat A, DM *dm) 226 { 227 PetscErrorCode ierr; 228 229 PetscFunctionBegin; 230 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 231 PetscValidPointer(dm,2); 232 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 233 PetscFunctionReturn(0); 234 } 235 236 #undef __FUNCT__ 237 #define __FUNCT__ "MatSetDM" 238 /*@ 239 MatSetDM - Sets the DM defining the data layout of the matrix 240 241 Not collective 242 243 Input Parameters: 244 + A - The Mat 245 - dm - The DM 246 247 Level: intermediate 248 249 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 250 @*/ 251 PetscErrorCode MatSetDM(Mat A, DM dm) 252 { 253 PetscErrorCode ierr; 254 255 PetscFunctionBegin; 256 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 257 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 258 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 259 PetscFunctionReturn(0); 260 } 261 262 #undef __FUNCT__ 263 #define __FUNCT__ "DMSetOptionsPrefix" 264 /*@C 265 DMSetOptionsPrefix - Sets 
the prefix used for searching for all 266 DMDA options in the database. 267 268 Logically Collective on DMDA 269 270 Input Parameter: 271 + da - the DMDA context 272 - prefix - the prefix to prepend to all option names 273 274 Notes: 275 A hyphen (-) must NOT be given at the beginning of the prefix name. 276 The first character of all runtime options is AUTOMATICALLY the hyphen. 277 278 Level: advanced 279 280 .keywords: DMDA, set, options, prefix, database 281 282 .seealso: DMSetFromOptions() 283 @*/ 284 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 285 { 286 PetscErrorCode ierr; 287 288 PetscFunctionBegin; 289 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 290 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 291 PetscFunctionReturn(0); 292 } 293 294 #undef __FUNCT__ 295 #define __FUNCT__ "DMDestroy" 296 /*@ 297 DMDestroy - Destroys a vector packer or DMDA. 298 299 Collective on DM 300 301 Input Parameter: 302 . dm - the DM object to destroy 303 304 Level: developer 305 306 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 307 308 @*/ 309 PetscErrorCode DMDestroy(DM *dm) 310 { 311 PetscInt i, cnt = 0, f; 312 DMNamedVecLink nlink,nnext; 313 PetscErrorCode ierr; 314 315 PetscFunctionBegin; 316 if (!*dm) PetscFunctionReturn(0); 317 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 318 319 /* count all the circular references of DM and its contained Vecs */ 320 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 321 if ((*dm)->localin[i]) cnt++; 322 if ((*dm)->globalin[i]) cnt++; 323 } 324 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 325 if ((*dm)->x) { 326 DM obj; 327 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 328 if (obj == *dm) cnt++; 329 } 330 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 331 if ((*dm)->x) { 332 DM obj; 333 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 334 if (obj == *dm) cnt++; 335 } 336 337 if (--((PetscObject)(*dm))->refct - cnt > 
0) {*dm = 0; PetscFunctionReturn(0);} 338 /* 339 Need this test because the dm references the vectors that 340 reference the dm, so destroying the dm calls destroy on the 341 vectors that cause another destroy on the dm 342 */ 343 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 344 ((PetscObject) (*dm))->refct = 0; 345 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 346 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 347 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 348 } 349 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 350 nnext = nlink->next; 351 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 352 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 353 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 354 ierr = PetscFree(nlink);CHKERRQ(ierr); 355 } 356 (*dm)->namedglobal = NULL; 357 358 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 359 nnext = nlink->next; 360 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 361 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 362 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 363 ierr = PetscFree(nlink);CHKERRQ(ierr); 364 } 365 (*dm)->namedlocal = NULL; 366 367 /* Destroy the list of hooks */ 368 { 369 DMCoarsenHookLink link,next; 370 for (link=(*dm)->coarsenhook; link; link=next) { 371 next = link->next; 372 ierr = PetscFree(link);CHKERRQ(ierr); 373 } 374 (*dm)->coarsenhook = NULL; 375 } 376 { 377 DMRefineHookLink link,next; 378 for (link=(*dm)->refinehook; link; link=next) { 379 next = link->next; 380 ierr = PetscFree(link);CHKERRQ(ierr); 381 } 382 (*dm)->refinehook = NULL; 383 } 384 { 385 DMSubDomainHookLink link,next; 386 for 
(link=(*dm)->subdomainhook; link; link=next) { 387 next = link->next; 388 ierr = PetscFree(link);CHKERRQ(ierr); 389 } 390 (*dm)->subdomainhook = NULL; 391 } 392 { 393 DMGlobalToLocalHookLink link,next; 394 for (link=(*dm)->gtolhook; link; link=next) { 395 next = link->next; 396 ierr = PetscFree(link);CHKERRQ(ierr); 397 } 398 (*dm)->gtolhook = NULL; 399 } 400 /* Destroy the work arrays */ 401 { 402 DMWorkLink link,next; 403 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 404 for (link=(*dm)->workin; link; link=next) { 405 next = link->next; 406 ierr = PetscFree(link->mem);CHKERRQ(ierr); 407 ierr = PetscFree(link);CHKERRQ(ierr); 408 } 409 (*dm)->workin = NULL; 410 } 411 412 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 413 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 414 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 415 416 if ((*dm)->ctx && (*dm)->ctxdestroy) { 417 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 418 } 419 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 420 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 421 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 422 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 423 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 424 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 425 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 426 427 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 428 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 429 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 430 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 431 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 432 433 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 434 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 435 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 436 437 for (f = 0; f < (*dm)->numFields; ++f) { 438 ierr = 
PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 439 } 440 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 441 /* if memory was published with AMS then destroy it */ 442 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 443 444 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 445 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 446 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 447 PetscFunctionReturn(0); 448 } 449 450 #undef __FUNCT__ 451 #define __FUNCT__ "DMSetUp" 452 /*@ 453 DMSetUp - sets up the data structures inside a DM object 454 455 Collective on DM 456 457 Input Parameter: 458 . dm - the DM object to setup 459 460 Level: developer 461 462 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 463 464 @*/ 465 PetscErrorCode DMSetUp(DM dm) 466 { 467 PetscErrorCode ierr; 468 469 PetscFunctionBegin; 470 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 471 if (dm->setupcalled) PetscFunctionReturn(0); 472 if (dm->ops->setup) { 473 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 474 } 475 dm->setupcalled = PETSC_TRUE; 476 PetscFunctionReturn(0); 477 } 478 479 #undef __FUNCT__ 480 #define __FUNCT__ "DMSetFromOptions" 481 /*@ 482 DMSetFromOptions - sets parameters in a DM from the options database 483 484 Collective on DM 485 486 Input Parameter: 487 . dm - the DM object to set options for 488 489 Options Database: 490 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 491 . -dm_vec_type <type> type of vector to create inside DM 492 . 
-dm_mat_type <type> type of matrix to create inside DM 493 - -dm_coloring_type <global or ghosted> 494 495 Level: developer 496 497 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 498 499 @*/ 500 PetscErrorCode DMSetFromOptions(DM dm) 501 { 502 char typeName[256] = MATAIJ; 503 PetscBool flg; 504 PetscErrorCode ierr; 505 506 PetscFunctionBegin; 507 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 508 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 509 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 510 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 511 if (flg) { 512 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 513 } 514 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 515 if (flg) { 516 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 517 } 518 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 519 if (dm->ops->setfromoptions) { 520 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 521 } 522 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 523 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 524 ierr = PetscOptionsEnd();CHKERRQ(ierr); 525 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 526 PetscFunctionReturn(0); 527 } 528 529 #undef __FUNCT__ 530 #define __FUNCT__ "DMView" 531 /*@C 532 DMView - Views a vector packer or DMDA. 
533 534 Collective on DM 535 536 Input Parameter: 537 + dm - the DM object to view 538 - v - the viewer 539 540 Level: developer 541 542 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 543 544 @*/ 545 PetscErrorCode DMView(DM dm,PetscViewer v) 546 { 547 PetscErrorCode ierr; 548 PetscBool isbinary; 549 550 PetscFunctionBegin; 551 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 552 if (!v) { 553 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 554 } 555 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 556 if (isbinary) { 557 PetscInt classid = DM_FILE_CLASSID; 558 char type[256]; 559 560 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 561 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 562 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 563 } 564 if (dm->ops->view) { 565 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 566 } 567 PetscFunctionReturn(0); 568 } 569 570 #undef __FUNCT__ 571 #define __FUNCT__ "DMCreateGlobalVector" 572 /*@ 573 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 574 575 Collective on DM 576 577 Input Parameter: 578 . dm - the DM object 579 580 Output Parameter: 581 . vec - the global vector 582 583 Level: beginner 584 585 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 586 587 @*/ 588 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 589 { 590 PetscErrorCode ierr; 591 592 PetscFunctionBegin; 593 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 594 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 595 PetscFunctionReturn(0); 596 } 597 598 #undef __FUNCT__ 599 #define __FUNCT__ "DMCreateLocalVector" 600 /*@ 601 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 602 603 Not Collective 604 605 Input Parameter: 606 . 
dm - the DM object 607 608 Output Parameter: 609 . vec - the local vector 610 611 Level: beginner 612 613 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 614 615 @*/ 616 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 617 { 618 PetscErrorCode ierr; 619 620 PetscFunctionBegin; 621 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 622 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 623 PetscFunctionReturn(0); 624 } 625 626 #undef __FUNCT__ 627 #define __FUNCT__ "DMGetLocalToGlobalMapping" 628 /*@ 629 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 630 631 Collective on DM 632 633 Input Parameter: 634 . dm - the DM that provides the mapping 635 636 Output Parameter: 637 . ltog - the mapping 638 639 Level: intermediate 640 641 Notes: 642 This mapping can then be used by VecSetLocalToGlobalMapping() or 643 MatSetLocalToGlobalMapping(). 644 645 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 646 @*/ 647 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 648 { 649 PetscErrorCode ierr; 650 651 PetscFunctionBegin; 652 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 653 PetscValidPointer(ltog,2); 654 if (!dm->ltogmap) { 655 PetscSection section, sectionGlobal; 656 657 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 658 if (section) { 659 PetscInt *ltog; 660 PetscInt pStart, pEnd, size, p, l; 661 662 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 663 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 664 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 665 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 666 for (p = pStart, l = 0; p < pEnd; ++p) { 667 PetscInt dof, off, c; 668 669 /* Should probably use constrained dofs */ 670 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 671 ierr = PetscSectionGetOffset(sectionGlobal, p, 
&off);CHKERRQ(ierr); 672 for (c = 0; c < dof; ++c, ++l) { 673 ltog[l] = off+c; 674 } 675 } 676 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 677 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 678 } else { 679 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 680 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 681 } 682 } 683 *ltog = dm->ltogmap; 684 PetscFunctionReturn(0); 685 } 686 687 #undef __FUNCT__ 688 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 689 /*@ 690 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 691 692 Collective on DM 693 694 Input Parameter: 695 . da - the distributed array that provides the mapping 696 697 Output Parameter: 698 . ltog - the block mapping 699 700 Level: intermediate 701 702 Notes: 703 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 704 MatSetLocalToGlobalMappingBlock(). 
705 706 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 707 @*/ 708 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 709 { 710 PetscErrorCode ierr; 711 712 PetscFunctionBegin; 713 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 714 PetscValidPointer(ltog,2); 715 if (!dm->ltogmapb) { 716 PetscInt bs; 717 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 718 if (bs > 1) { 719 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 720 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 721 } else { 722 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 723 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 724 } 725 } 726 *ltog = dm->ltogmapb; 727 PetscFunctionReturn(0); 728 } 729 730 #undef __FUNCT__ 731 #define __FUNCT__ "DMGetBlockSize" 732 /*@ 733 DMGetBlockSize - Gets the inherent block size associated with a DM 734 735 Not Collective 736 737 Input Parameter: 738 . dm - the DM with block structure 739 740 Output Parameter: 741 . 
bs - the block size, 1 implies no exploitable block structure 742 743 Level: intermediate 744 745 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 746 @*/ 747 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 748 { 749 PetscFunctionBegin; 750 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 751 PetscValidPointer(bs,2); 752 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 753 *bs = dm->bs; 754 PetscFunctionReturn(0); 755 } 756 757 #undef __FUNCT__ 758 #define __FUNCT__ "DMCreateInterpolation" 759 /*@ 760 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 761 762 Collective on DM 763 764 Input Parameter: 765 + dm1 - the DM object 766 - dm2 - the second, finer DM object 767 768 Output Parameter: 769 + mat - the interpolation 770 - vec - the scaling (optional) 771 772 Level: developer 773 774 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 775 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 776 777 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 778 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
779 780 781 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 782 783 @*/ 784 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 785 { 786 PetscErrorCode ierr; 787 788 PetscFunctionBegin; 789 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 790 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 791 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 792 PetscFunctionReturn(0); 793 } 794 795 #undef __FUNCT__ 796 #define __FUNCT__ "DMCreateInjection" 797 /*@ 798 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 799 800 Collective on DM 801 802 Input Parameter: 803 + dm1 - the DM object 804 - dm2 - the second, finer DM object 805 806 Output Parameter: 807 . ctx - the injection 808 809 Level: developer 810 811 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 812 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 813 814 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 815 816 @*/ 817 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 818 { 819 PetscErrorCode ierr; 820 821 PetscFunctionBegin; 822 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 823 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 824 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 825 PetscFunctionReturn(0); 826 } 827 828 #undef __FUNCT__ 829 #define __FUNCT__ "DMCreateColoring" 830 /*@C 831 DMCreateColoring - Gets coloring for a DMDA or DMComposite 832 833 Collective on DM 834 835 Input Parameter: 836 + dm - the DM object 837 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 838 - matype - either MATAIJ or MATBAIJ 839 840 Output Parameter: 841 . 
coloring - the coloring 842 843 Level: developer 844 845 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 846 847 @*/ 848 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 849 { 850 PetscErrorCode ierr; 851 852 PetscFunctionBegin; 853 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 854 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 855 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 856 PetscFunctionReturn(0); 857 } 858 859 #undef __FUNCT__ 860 #define __FUNCT__ "DMCreateMatrix" 861 /*@C 862 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 863 864 Collective on DM 865 866 Input Parameter: 867 + dm - the DM object 868 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 869 any type which inherits from one of these (such as MATAIJ) 870 871 Output Parameter: 872 . mat - the empty Jacobian 873 874 Level: beginner 875 876 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 877 do not need to do it yourself. 878 879 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 880 the nonzero pattern call DMDASetMatPreallocateOnly() 881 882 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 883 internally by PETSc. 884 885 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 886 the indices for the global numbering for DMDAs which is complicated. 
887 888 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 889 890 @*/ 891 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 892 { 893 PetscErrorCode ierr; 894 895 PetscFunctionBegin; 896 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 897 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 898 ierr = MatInitializePackage();CHKERRQ(ierr); 899 #endif 900 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 901 PetscValidPointer(mat,3); 902 if (dm->mattype) { 903 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 904 } else { 905 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 906 } 907 PetscFunctionReturn(0); 908 } 909 910 #undef __FUNCT__ 911 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 912 /*@ 913 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 914 preallocated but the nonzero structure and zero values will not be set. 915 916 Logically Collective on DMDA 917 918 Input Parameter: 919 + dm - the DM 920 - only - PETSC_TRUE if only want preallocation 921 922 Level: developer 923 .seealso DMCreateMatrix() 924 @*/ 925 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 926 { 927 PetscFunctionBegin; 928 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 929 dm->prealloc_only = only; 930 PetscFunctionReturn(0); 931 } 932 933 #undef __FUNCT__ 934 #define __FUNCT__ "DMGetWorkArray" 935 /*@C 936 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 937 938 Not Collective 939 940 Input Parameters: 941 + dm - the DM object 942 . count - The minium size 943 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 944 945 Output Parameter: 946 . 
array - the work array 947 948 Level: developer 949 950 .seealso DMDestroy(), DMCreate() 951 @*/ 952 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 953 { 954 PetscErrorCode ierr; 955 DMWorkLink link; 956 size_t size; 957 958 PetscFunctionBegin; 959 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 960 PetscValidPointer(mem,4); 961 if (dm->workin) { 962 link = dm->workin; 963 dm->workin = dm->workin->next; 964 } else { 965 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 966 } 967 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 968 if (size*count > link->bytes) { 969 ierr = PetscFree(link->mem);CHKERRQ(ierr); 970 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 971 link->bytes = size*count; 972 } 973 link->next = dm->workout; 974 dm->workout = link; 975 *(void**)mem = link->mem; 976 PetscFunctionReturn(0); 977 } 978 979 #undef __FUNCT__ 980 #define __FUNCT__ "DMRestoreWorkArray" 981 /*@C 982 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 983 984 Not Collective 985 986 Input Parameters: 987 + dm - the DM object 988 . count - The minium size 989 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 990 991 Output Parameter: 992 . 
array - the work array 993 994 Level: developer 995 996 .seealso DMDestroy(), DMCreate() 997 @*/ 998 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 999 { 1000 DMWorkLink *p,link; 1001 1002 PetscFunctionBegin; 1003 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1004 PetscValidPointer(mem,4); 1005 for (p=&dm->workout; (link=*p); p=&link->next) { 1006 if (link->mem == *(void**)mem) { 1007 *p = link->next; 1008 link->next = dm->workin; 1009 dm->workin = link; 1010 *(void**)mem = NULL; 1011 PetscFunctionReturn(0); 1012 } 1013 } 1014 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1015 PetscFunctionReturn(0); 1016 } 1017 1018 #undef __FUNCT__ 1019 #define __FUNCT__ "DMSetNullSpaceConstructor" 1020 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1021 { 1022 PetscFunctionBegin; 1023 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1024 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1025 dm->nullspaceConstructors[field] = nullsp; 1026 PetscFunctionReturn(0); 1027 } 1028 1029 #undef __FUNCT__ 1030 #define __FUNCT__ "DMCreateFieldIS" 1031 /*@C 1032 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1033 1034 Not collective 1035 1036 Input Parameter: 1037 . dm - the DM object 1038 1039 Output Parameters: 1040 + numFields - The number of fields (or NULL if not requested) 1041 . fieldNames - The name for each field (or NULL if not requested) 1042 - fields - The global indices for each field (or NULL if not requested) 1043 1044 Level: intermediate 1045 1046 Notes: 1047 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1048 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1049 PetscFree(). 1050 1051 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1052 @*/ 1053 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1054 { 1055 PetscSection section, sectionGlobal; 1056 PetscErrorCode ierr; 1057 1058 PetscFunctionBegin; 1059 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1060 if (numFields) { 1061 PetscValidPointer(numFields,2); 1062 *numFields = 0; 1063 } 1064 if (fieldNames) { 1065 PetscValidPointer(fieldNames,3); 1066 *fieldNames = NULL; 1067 } 1068 if (fields) { 1069 PetscValidPointer(fields,4); 1070 *fields = NULL; 1071 } 1072 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1073 if (section) { 1074 PetscInt *fieldSizes, **fieldIndices; 1075 PetscInt nF, f, pStart, pEnd, p; 1076 1077 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1078 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1079 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1080 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1081 for (f = 0; f < nF; ++f) { 1082 fieldSizes[f] = 0; 1083 } 1084 for (p = pStart; p < pEnd; ++p) { 1085 PetscInt gdof; 1086 1087 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1088 if (gdof > 0) { 1089 for (f = 0; f < nF; ++f) { 1090 PetscInt fdof, fcdof; 1091 1092 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1093 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1094 fieldSizes[f] += fdof-fcdof; 1095 } 1096 } 1097 } 1098 for (f = 0; f < nF; ++f) { 1099 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1100 fieldSizes[f] = 0; 1101 } 1102 for (p = pStart; p < pEnd; ++p) { 1103 PetscInt gdof, goff; 
1104 1105 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1106 if (gdof > 0) { 1107 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1108 for (f = 0; f < nF; ++f) { 1109 PetscInt fdof, fcdof, fc; 1110 1111 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1112 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1113 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1114 fieldIndices[f][fieldSizes[f]] = goff++; 1115 } 1116 } 1117 } 1118 } 1119 if (numFields) *numFields = nF; 1120 if (fieldNames) { 1121 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1122 for (f = 0; f < nF; ++f) { 1123 const char *fieldName; 1124 1125 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1126 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1127 } 1128 } 1129 if (fields) { 1130 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1131 for (f = 0; f < nF; ++f) { 1132 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1133 } 1134 } 1135 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1136 } else if (dm->ops->createfieldis) { 1137 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1138 } 1139 PetscFunctionReturn(0); 1140 } 1141 1142 1143 #undef __FUNCT__ 1144 #define __FUNCT__ "DMCreateFieldDecomposition" 1145 /*@C 1146 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1147 corresponding to different fields: each IS contains the global indices of the dofs of the 1148 corresponding field. The optional list of DMs define the DM for each subproblem. 1149 Generalizes DMCreateFieldIS(). 1150 1151 Not collective 1152 1153 Input Parameter: 1154 . 
dm - the DM object 1155 1156 Output Parameters: 1157 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1158 . namelist - The name for each field (or NULL if not requested) 1159 . islist - The global indices for each field (or NULL if not requested) 1160 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1161 1162 Level: intermediate 1163 1164 Notes: 1165 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1166 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1167 and all of the arrays should be freed with PetscFree(). 1168 1169 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1170 @*/ 1171 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1172 { 1173 PetscErrorCode ierr; 1174 1175 PetscFunctionBegin; 1176 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1177 if (len) { 1178 PetscValidPointer(len,2); 1179 *len = 0; 1180 } 1181 if (namelist) { 1182 PetscValidPointer(namelist,3); 1183 *namelist = 0; 1184 } 1185 if (islist) { 1186 PetscValidPointer(islist,4); 1187 *islist = 0; 1188 } 1189 if (dmlist) { 1190 PetscValidPointer(dmlist,5); 1191 *dmlist = 0; 1192 } 1193 /* 1194 Is it a good idea to apply the following check across all impls? 1195 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1196 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1197 */ 1198 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1199 if (!dm->ops->createfielddecomposition) { 1200 PetscSection section; 1201 PetscInt numFields, f; 1202 1203 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1204 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1205 if (section && numFields && dm->ops->createsubdm) { 1206 *len = numFields; 1207 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1208 for (f = 0; f < numFields; ++f) { 1209 const char *fieldName; 1210 1211 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1212 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1213 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1214 } 1215 } else { 1216 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1217 /* By default there are no DMs associated with subproblems. */ 1218 if (dmlist) *dmlist = NULL; 1219 } 1220 } else { 1221 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1222 } 1223 PetscFunctionReturn(0); 1224 } 1225 1226 #undef __FUNCT__ 1227 #define __FUNCT__ "DMCreateSubDM" 1228 /*@C 1229 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1230 The fields are defined by DMCreateFieldIS(). 1231 1232 Not collective 1233 1234 Input Parameters: 1235 + dm - the DM object 1236 . numFields - number of fields in this subproblem 1237 - len - The number of subproblems in the decomposition (or NULL if not requested) 1238 1239 Output Parameters: 1240 . 
 is - The global indices for the subproblem
- subdm - The DM for the subproblem

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  /* Delegate entirely to the implementation; there is no generic fallback */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (len) {PetscValidPointer(len,2); *len = 0;}
  if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;}
  if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;}
  if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;}
  if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionScatters"
/*@C
  DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

  Not collective

  Input Parameters:
+ dm - the DM object
. n - the number of subdomain scatters
- subdms - the local subdomains

  Output Parameters:
+ n - the number of scatters returned
. iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
. oscat - scatter from global vector to overlapping global vector entries on subdomain
- gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

  Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
  of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets
  of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
  solution and residual data.
1351 1352 Level: developer 1353 1354 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1355 @*/ 1356 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1357 { 1358 PetscErrorCode ierr; 1359 1360 PetscFunctionBegin; 1361 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1362 PetscValidPointer(subdms,3); 1363 if (dm->ops->createddscatters) { 1364 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1365 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1366 PetscFunctionReturn(0); 1367 } 1368 1369 #undef __FUNCT__ 1370 #define __FUNCT__ "DMRefine" 1371 /*@ 1372 DMRefine - Refines a DM object 1373 1374 Collective on DM 1375 1376 Input Parameter: 1377 + dm - the DM object 1378 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1379 1380 Output Parameter: 1381 . 
dmf - the refined DM, or NULL 1382 1383 Note: If no refinement was done, the return value is NULL 1384 1385 Level: developer 1386 1387 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1388 @*/ 1389 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1390 { 1391 PetscErrorCode ierr; 1392 DMRefineHookLink link; 1393 1394 PetscFunctionBegin; 1395 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1396 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1397 if (*dmf) { 1398 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1399 1400 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1401 1402 (*dmf)->ctx = dm->ctx; 1403 (*dmf)->leveldown = dm->leveldown; 1404 (*dmf)->levelup = dm->levelup + 1; 1405 1406 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1407 for (link=dm->refinehook; link; link=link->next) { 1408 if (link->refinehook) { 1409 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1410 } 1411 } 1412 } 1413 PetscFunctionReturn(0); 1414 } 1415 1416 #undef __FUNCT__ 1417 #define __FUNCT__ "DMRefineHookAdd" 1418 /*@C 1419 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1420 1421 Logically Collective 1422 1423 Input Arguments: 1424 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1425 . refinehook - function to run when setting up a coarser level 1426 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1427 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1428 1429 Calling sequence of refinehook: 1430 $ refinehook(DM coarse,DM fine,void *ctx); 1431 1432 + coarse - coarse level DM 1433 . 
 fine - fine level DM to interpolate problem to
- ctx - optional user-defined function context

  Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+ coarse - coarse level DM
. interp - matrix interpolating a coarse-level solution to the finer grid
. fine - fine level DM to update
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

  If this function is called multiple times, the hooks will be run in the order they are added.

  This function is currently not available from Fortran.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx        = ctx;
  link->next       = NULL;
  *p               = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
  DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

  Collective if any hooks are

  Input Arguments:
+ coarse - coarser DM to use as a base
. interp - interpolation matrix, apply using MatInterpolate()
- fine - finer DM to update

  Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Run every interpolation hook registered on the fine DM, in registration order */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {
      ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
  DMGetRefineLevel - Get's the number of refinements that have generated this DM.

  Not Collective

  Input Parameter:
. dm - the DM object

  Output Parameter:
. level - number of refinements

  Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
  DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

  Logically Collective

  Input Arguments:
+ dm - the DM
. beginhook - function to run at the beginning of DMGlobalToLocalBegin()
. endhook - function to run after DMGlobalToLocalEnd() has completed
- ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

  Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+ dm - global DM
. g - global vector
. mode - mode
.
 l - local vector
- ctx - optional user-defined function context


  Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+ dm - global DM
- ctx - optional user-defined function context

  Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
  DMGlobalToLocalBegin - Begins updating local vectors from global vector

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. g - the global vector
. mode - INSERT_VALUES or ADD_VALUES
- l - the local vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* User hooks run before any communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The PetscSF path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
  DMGlobalToLocalEnd - Ends updating local vectors from global vector

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. g - the global vector
.
 mode - INSERT_VALUES or ADD_VALUES
- l - the local vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The PetscSF path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* End hooks run after the communication completes */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
  DMLocalToGlobalBegin - updates global vectors from local vectors

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. l - the local vector
. mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
         base point.
- g - the global vector

  Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation.
  If you would like to simply add the non-ghosted values in the local
  array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
  global array to the final global array with VecAXPY().

  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Choose the reduction: REPLACE for insertion, SUM for accumulation of ghost contributions */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for the type-specific implementation */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
  DMLocalToGlobalEnd - updates global vectors from local vectors

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. l - the local vector
.
 mode - INSERT_VALUES or ADD_VALUES
- g - the global vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must select the same reduction as the matching DMLocalToGlobalBegin() call */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for the type-specific implementation */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
  DMCoarsen - Coarsens a DM object

  Collective on DM

  Input Parameters:
+ dm - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
. dmc - the coarsened DM

  Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  /* The coarse DM inherits the matrix constructor, user context, and level counters */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx       = dm->ctx;
  (*dmc)->levelup   = dm->levelup;
  (*dmc)->leveldown = dm->leveldown + 1;
  ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run user hooks so auxiliary data follows the coarsened DM */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHookAdd"
/*@C
  DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

  Logically Collective

  Input Arguments:
+ fine - nonlinear solver context on which to run a hook when restricting to a coarser level
. coarsenhook - function to run when setting up a coarser level
. restricthook - function to run to update data on coarser levels (once per SNESSolve())
- ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

  Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+ fine - fine level DM
. coarse - coarse level DM to restrict problem to
- ctx - optional user-defined function context

  Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+ fine - fine level DM
.
 mrestrict - matrix restricting a fine-level solution to the coarse grid
. rscale - scaling vector for restriction
. inject - matrix restricting by injection
. coarse - coarse level DM to update
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be set up on coarse grids.

  If this function is called multiple times, the hooks will be run in the order they are added.

  In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
  extract the finest level information from its context (instead of from the SNES).

  This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
  DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

  Collective if any hooks are

  Input Arguments:
+ fine - finer DM to use as a base
. restrct - restriction matrix, apply using MatRestrict()
. rscale - scaling vector for restriction
. inject - injection matrix, also use MatRestrict()
- coarse - coarser DM to update

  Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run every restriction hook registered on the fine DM, in registration order */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@C
  DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid

  Logically Collective

  Input Arguments:
+ global - global DM
. ddhook - function to run to pass data to the decomposition DM upon its creation
. restricthook - function to run to update data on block solve (at the beginning of the block solve)
- ctx - [optional] user-defined context for provide data for the hooks (may be NULL)


  Calling sequence for ddhook:
$    ddhook(DM global,DM block,void *ctx)

+ global - global DM
. block - block DM
- ctx - optional user-defined function context

  Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+ global - global DM
. out - scatter to the outer (with ghost and overlap points) block vector
. in - scatter to block vector values only owned locally
. block - block DM
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be set up on subdomain DMs.

  If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order.
     NOTE(review): the link is owned by the DM — presumably freed in DMDestroy(); confirm there. */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 1983 - coarse - coarer DM to update 1984 1985 Level: developer 1986 1987 .seealso: DMCoarsenHookAdd(), MatRestrict() 1988 @*/ 1989 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 1990 { 1991 PetscErrorCode ierr; 1992 DMSubDomainHookLink link; 1993 1994 PetscFunctionBegin; 1995 for (link=global->subdomainhook; link; link=link->next) { 1996 if (link->restricthook) { 1997 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 1998 } 1999 } 2000 PetscFunctionReturn(0); 2001 } 2002 2003 #undef __FUNCT__ 2004 #define __FUNCT__ "DMGetCoarsenLevel" 2005 /*@ 2006 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2007 2008 Not Collective 2009 2010 Input Parameter: 2011 . dm - the DM object 2012 2013 Output Parameter: 2014 . level - number of coarsenings 2015 2016 Level: developer 2017 2018 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2019 2020 @*/ 2021 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2022 { 2023 PetscFunctionBegin; 2024 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2025 *level = dm->leveldown; 2026 PetscFunctionReturn(0); 2027 } 2028 2029 2030 2031 #undef __FUNCT__ 2032 #define __FUNCT__ "DMRefineHierarchy" 2033 /*@C 2034 DMRefineHierarchy - Refines a DM object, all levels at once 2035 2036 Collective on DM 2037 2038 Input Parameter: 2039 + dm - the DM object 2040 - nlevels - the number of levels of refinement 2041 2042 Output Parameter: 2043 . 
dmf - the refined DM hierarchy 2044 2045 Level: developer 2046 2047 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2048 2049 @*/ 2050 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2051 { 2052 PetscErrorCode ierr; 2053 2054 PetscFunctionBegin; 2055 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2056 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2057 if (nlevels == 0) PetscFunctionReturn(0); 2058 if (dm->ops->refinehierarchy) { 2059 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2060 } else if (dm->ops->refine) { 2061 PetscInt i; 2062 2063 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2064 for (i=1; i<nlevels; i++) { 2065 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2066 } 2067 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2068 PetscFunctionReturn(0); 2069 } 2070 2071 #undef __FUNCT__ 2072 #define __FUNCT__ "DMCoarsenHierarchy" 2073 /*@C 2074 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2075 2076 Collective on DM 2077 2078 Input Parameter: 2079 + dm - the DM object 2080 - nlevels - the number of levels of coarsening 2081 2082 Output Parameter: 2083 . 
dmc - the coarsened DM hierarchy 2084 2085 Level: developer 2086 2087 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2088 2089 @*/ 2090 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2091 { 2092 PetscErrorCode ierr; 2093 2094 PetscFunctionBegin; 2095 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2096 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2097 if (nlevels == 0) PetscFunctionReturn(0); 2098 PetscValidPointer(dmc,3); 2099 if (dm->ops->coarsenhierarchy) { 2100 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2101 } else if (dm->ops->coarsen) { 2102 PetscInt i; 2103 2104 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2105 for (i=1; i<nlevels; i++) { 2106 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2107 } 2108 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2109 PetscFunctionReturn(0); 2110 } 2111 2112 #undef __FUNCT__ 2113 #define __FUNCT__ "DMCreateAggregates" 2114 /*@ 2115 DMCreateAggregates - Gets the aggregates that map between 2116 grids associated with two DMs. 2117 2118 Collective on DM 2119 2120 Input Parameters: 2121 + dmc - the coarse grid DM 2122 - dmf - the fine grid DM 2123 2124 Output Parameters: 2125 . 
rest - the restriction matrix (transpose of the projection matrix) 2126 2127 Level: intermediate 2128 2129 .keywords: interpolation, restriction, multigrid 2130 2131 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2132 @*/ 2133 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2134 { 2135 PetscErrorCode ierr; 2136 2137 PetscFunctionBegin; 2138 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2139 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2140 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2141 PetscFunctionReturn(0); 2142 } 2143 2144 #undef __FUNCT__ 2145 #define __FUNCT__ "DMSetApplicationContextDestroy" 2146 /*@C 2147 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2148 2149 Not Collective 2150 2151 Input Parameters: 2152 + dm - the DM object 2153 - destroy - the destroy function 2154 2155 Level: intermediate 2156 2157 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2158 2159 @*/ 2160 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2161 { 2162 PetscFunctionBegin; 2163 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2164 dm->ctxdestroy = destroy; 2165 PetscFunctionReturn(0); 2166 } 2167 2168 #undef __FUNCT__ 2169 #define __FUNCT__ "DMSetApplicationContext" 2170 /*@ 2171 DMSetApplicationContext - Set a user context into a DM object 2172 2173 Not Collective 2174 2175 Input Parameters: 2176 + dm - the DM object 2177 - ctx - the user context 2178 2179 Level: intermediate 2180 2181 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2182 2183 @*/ 2184 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2185 { 2186 PetscFunctionBegin; 2187 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2188 dm->ctx = ctx; 2189 PetscFunctionReturn(0); 2190 } 2191 
2192 #undef __FUNCT__ 2193 #define __FUNCT__ "DMGetApplicationContext" 2194 /*@ 2195 DMGetApplicationContext - Gets a user context from a DM object 2196 2197 Not Collective 2198 2199 Input Parameter: 2200 . dm - the DM object 2201 2202 Output Parameter: 2203 . ctx - the user context 2204 2205 Level: intermediate 2206 2207 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2208 2209 @*/ 2210 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2211 { 2212 PetscFunctionBegin; 2213 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2214 *(void**)ctx = dm->ctx; 2215 PetscFunctionReturn(0); 2216 } 2217 2218 #undef __FUNCT__ 2219 #define __FUNCT__ "DMSetVariableBounds" 2220 /*@C 2221 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2222 2223 Logically Collective on DM 2224 2225 Input Parameter: 2226 + dm - the DM object 2227 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2228 2229 Level: intermediate 2230 2231 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2232 DMSetJacobian() 2233 2234 @*/ 2235 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2236 { 2237 PetscFunctionBegin; 2238 dm->ops->computevariablebounds = f; 2239 PetscFunctionReturn(0); 2240 } 2241 2242 #undef __FUNCT__ 2243 #define __FUNCT__ "DMHasVariableBounds" 2244 /*@ 2245 DMHasVariableBounds - does the DM object have a variable bounds function? 2246 2247 Not Collective 2248 2249 Input Parameter: 2250 . dm - the DM object to destroy 2251 2252 Output Parameter: 2253 . 
flg - PETSC_TRUE if the variable bounds function exists 2254 2255 Level: developer 2256 2257 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2258 2259 @*/ 2260 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2261 { 2262 PetscFunctionBegin; 2263 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2264 PetscFunctionReturn(0); 2265 } 2266 2267 #undef __FUNCT__ 2268 #define __FUNCT__ "DMComputeVariableBounds" 2269 /*@C 2270 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2271 2272 Logically Collective on DM 2273 2274 Input Parameters: 2275 + dm - the DM object to destroy 2276 - x - current solution at which the bounds are computed 2277 2278 Output parameters: 2279 + xl - lower bound 2280 - xu - upper bound 2281 2282 Level: intermediate 2283 2284 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2285 2286 @*/ 2287 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2288 { 2289 PetscErrorCode ierr; 2290 2291 PetscFunctionBegin; 2292 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2293 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2294 if (dm->ops->computevariablebounds) { 2295 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2296 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2297 PetscFunctionReturn(0); 2298 } 2299 2300 #undef __FUNCT__ 2301 #define __FUNCT__ "DMHasColoring" 2302 /*@ 2303 DMHasColoring - does the DM object have a method of providing a coloring? 2304 2305 Not Collective 2306 2307 Input Parameter: 2308 . dm - the DM object 2309 2310 Output Parameter: 2311 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2312 2313 Level: developer 2314 2315 .seealso DMHasFunction(), DMCreateColoring() 2316 2317 @*/ 2318 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2319 { 2320 PetscFunctionBegin; 2321 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2322 PetscFunctionReturn(0); 2323 } 2324 2325 #undef __FUNCT__ 2326 #define __FUNCT__ "DMSetVec" 2327 /*@C 2328 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2329 2330 Collective on DM 2331 2332 Input Parameter: 2333 + dm - the DM object 2334 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2335 2336 Level: developer 2337 2338 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2339 2340 @*/ 2341 PetscErrorCode DMSetVec(DM dm,Vec x) 2342 { 2343 PetscErrorCode ierr; 2344 2345 PetscFunctionBegin; 2346 if (x) { 2347 if (!dm->x) { 2348 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2349 } 2350 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2351 } else if (dm->x) { 2352 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2353 } 2354 PetscFunctionReturn(0); 2355 } 2356 2357 PetscFunctionList DMList = NULL; 2358 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2359 2360 #undef __FUNCT__ 2361 #define __FUNCT__ "DMSetType" 2362 /*@C 2363 DMSetType - Builds a DM, for a particular DM implementation. 2364 2365 Collective on DM 2366 2367 Input Parameters: 2368 + dm - The DM object 2369 - method - The name of the DM type 2370 2371 Options Database Key: 2372 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2373 2374 Notes: 2375 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already of the requested type: nothing to do */

  /* Make sure the registry of DM constructors is populated, then look up the requested type */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the implementation-specific data of the previous type before building the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  /* Borrowed pointer into the object header; the caller must not free it */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed here but never consulted below — confirm intent */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<fromtype>_<totype>_C" and query it on dm */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    /* Instantiate a throwaway DM of the target type and query the same composed name on it */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No generic fallback exists yet, so an unmatched pair is an error */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister -  Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -da_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* NOTE(review): only binary viewers are accepted below although the docs above also mention HDF5 — confirm */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header written by DMView(): the DM file classid followed by the type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the rest of the file to the type-specific loader, if one exists */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/


#undef __FUNCT__ 2636 #define __FUNCT__ "DMPrintCellVector" 2637 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2638 { 2639 PetscInt f; 2640 PetscErrorCode ierr; 2641 2642 PetscFunctionBegin; 2643 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2644 for (f = 0; f < len; ++f) { 2645 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2646 } 2647 PetscFunctionReturn(0); 2648 } 2649 2650 #undef __FUNCT__ 2651 #define __FUNCT__ "DMPrintCellMatrix" 2652 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2653 { 2654 PetscInt f, g; 2655 PetscErrorCode ierr; 2656 2657 PetscFunctionBegin; 2658 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2659 for (f = 0; f < rows; ++f) { 2660 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2661 for (g = 0; g < cols; ++g) { 2662 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2663 } 2664 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2665 } 2666 PetscFunctionReturn(0); 2667 } 2668 2669 #undef __FUNCT__ 2670 #define __FUNCT__ "DMGetDefaultSection" 2671 /*@ 2672 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2673 2674 Input Parameter: 2675 . dm - The DM 2676 2677 Output Parameter: 2678 . section - The PetscSection 2679 2680 Level: intermediate 2681 2682 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old section so passing the already-set section is safe.
     NOTE(review): a NULL section is not guarded here (unlike DMSetDefaultGlobalSection's docs) — confirm callers never pass NULL */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field count and names onto the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2776 2777 Input Parameters: 2778 + dm - The DM 2779 - section - The PetscSection, or NULL 2780 2781 Level: intermediate 2782 2783 Note: Any existing Section will be destroyed 2784 2785 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2786 @*/ 2787 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2788 { 2789 PetscErrorCode ierr; 2790 2791 PetscFunctionBegin; 2792 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2793 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2794 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2795 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2796 dm->defaultGlobalSection = section; 2797 PetscFunctionReturn(0); 2798 } 2799 2800 #undef __FUNCT__ 2801 #define __FUNCT__ "DMGetDefaultSF" 2802 /*@ 2803 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2804 it is created from the default PetscSection layouts in the DM. 2805 2806 Input Parameter: 2807 . dm - The DM 2808 2809 Output Parameter: 2810 . sf - The PetscSF 2811 2812 Level: intermediate 2813 2814 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2815 2816 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2817 @*/ 2818 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2819 { 2820 PetscInt nroots; 2821 PetscErrorCode ierr; 2822 2823 PetscFunctionBegin; 2824 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2825 PetscValidPointer(sf, 2); 2826 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2827 if (nroots < 0) { 2828 PetscSection section, gSection; 2829 2830 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2831 if (section) { 2832 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2833 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2834 } else { 2835 *sf = NULL; 2836 PetscFunctionReturn(0); 2837 } 2838 } 2839 *sf = dm->defaultSF; 2840 PetscFunctionReturn(0); 2841 } 2842 2843 #undef __FUNCT__ 2844 #define __FUNCT__ "DMSetDefaultSF" 2845 /*@ 2846 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 2847 2848 Input Parameters: 2849 + dm - The DM 2850 - sf - The PetscSF 2851 2852 Level: intermediate 2853 2854 Note: Any previous SF is destroyed 2855 2856 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 2857 @*/ 2858 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 2859 { 2860 PetscErrorCode ierr; 2861 2862 PetscFunctionBegin; 2863 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2864 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 2865 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 2866 dm->defaultSF = sf; 2867 PetscFunctionReturn(0); 2868 } 2869 2870 #undef __FUNCT__ 2871 #define __FUNCT__ "DMCreateDefaultSF" 2872 /*@C 2873 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 2874 describing the data layout. 2875 2876 Input Parameters: 2877 + dm - The DM 2878 . 
localSection - PetscSection describing the local data layout 2879 - globalSection - PetscSection describing the global data layout 2880 2881 Level: intermediate 2882 2883 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 2884 @*/ 2885 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 2886 { 2887 MPI_Comm comm; 2888 PetscLayout layout; 2889 const PetscInt *ranges; 2890 PetscInt *local; 2891 PetscSFNode *remote; 2892 PetscInt pStart, pEnd, p, nroots, nleaves, l; 2893 PetscMPIInt size, rank; 2894 PetscErrorCode ierr; 2895 2896 PetscFunctionBegin; 2897 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 2898 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2899 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2900 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2901 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 2902 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 2903 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 2904 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 2905 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 2906 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 2907 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 2908 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 2909 PetscInt gdof, gcdof; 2910 2911 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2912 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2913 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 2914 } 2915 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 2916 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 2917 for (p = pStart, l = 0; p < pEnd; ++p) { 2918 const PetscInt *cind; 2919 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 2920 2921 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 2922 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 2923 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 2924 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 2925 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2926 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2927 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 2928 if (!gdof) continue; /* Censored point */ 2929 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 2930 if (gsize != dof-cdof) { 2931 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 2932 cdof = 0; /* Ignore constraints */ 2933 } 2934 for (d = 0, c = 0; d < dof; ++d) { 2935 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 2936 local[l+d-c] = off+d; 2937 } 2938 if (gdof < 0) { 2939 for (d = 0; d < gsize; ++d, ++l) { 2940 PetscInt offset = -(goff+1) + d, r; 2941 2942 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 2943 if (r < 0) r = -(r+2); 2944 remote[l].rank = r; 2945 remote[l].index = offset - ranges[r]; 2946 } 2947 } else { 2948 for (d = 0; d < gsize; ++d, ++l) { 2949 remote[l].rank = rank; 2950 remote[l].index = goff+d - ranges[rank]; 2951 } 2952 } 2953 } 2954 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 2955 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 2956 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 2957 PetscFunctionReturn(0); 2958 } 2959 2960 #undef __FUNCT__ 2961 #define __FUNCT__ "DMGetPointSF" 2962 /*@ 2963 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 2964 2965 Input Parameter: 2966 . dm - The DM 2967 2968 Output Parameter: 2969 . sf - The PetscSF 2970 2971 Level: intermediate 2972 2973 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 2974 2975 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 2976 @*/ 2977 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 2978 { 2979 PetscFunctionBegin; 2980 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2981 PetscValidPointer(sf, 2); 2982 *sf = dm->sf; 2983 PetscFunctionReturn(0); 2984 } 2985 2986 #undef __FUNCT__ 2987 #define __FUNCT__ "DMSetPointSF" 2988 /*@ 2989 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 2990 2991 Input Parameters: 2992 + dm - The DM 2993 - sf - The PetscSF 2994 2995 Level: intermediate 2996 2997 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 2998 @*/ 2999 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3000 { 3001 PetscErrorCode ierr; 3002 3003 PetscFunctionBegin; 3004 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3005 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3006 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3007 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3008 dm->sf = sf; 3009 PetscFunctionReturn(0); 3010 } 3011 3012 #undef __FUNCT__ 3013 #define __FUNCT__ "DMGetNumFields" 3014 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3015 { 3016 PetscFunctionBegin; 3017 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3018 PetscValidPointer(numFields, 2); 3019 *numFields = dm->numFields; 3020 PetscFunctionReturn(0); 3021 } 3022 3023 #undef __FUNCT__ 3024 #define __FUNCT__ "DMSetNumFields" 3025 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3026 { 3027 PetscInt f; 3028 PetscErrorCode ierr; 3029 3030 PetscFunctionBegin; 3031 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3032 for (f = 0; f < dm->numFields; ++f) { 3033 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3034 } 3035 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3036 dm->numFields = numFields; 3037 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3038 for (f = 0; f < dm->numFields; ++f) { 3039 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3040 } 3041 PetscFunctionReturn(0); 3042 } 3043 3044 #undef __FUNCT__ 3045 #define __FUNCT__ "DMGetField" 3046 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3047 { 3048 PetscFunctionBegin; 3049 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3050 PetscValidPointer(field, 2); 3051 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3052 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3053 *field = dm->fields[f]; 3054 PetscFunctionReturn(0); 3055 } 3056 3057 #undef __FUNCT__ 3058 #define __FUNCT__ "DMSetCoordinates" 3059 /*@ 3060 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3061 3062 Collective on DM 3063 3064 Input Parameters: 3065 + dm - the DM 3066 - c - coordinate vector 3067 3068 Note: 3069 The coordinates do include those for ghost points, which are in the local vector 3070 3071 Level: intermediate 3072 3073 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3074 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3075 @*/ 3076 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3077 { 3078 PetscErrorCode ierr; 3079 3080 PetscFunctionBegin; 3081 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3082 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3083 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3084 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3085 dm->coordinates = c; 3086 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3087 PetscFunctionReturn(0); 3088 } 3089 3090 #undef __FUNCT__ 3091 #define __FUNCT__ "DMSetCoordinatesLocal" 3092 /*@ 3093 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3094 3095 Collective on DM 3096 3097 Input Parameters: 3098 + dm - the DM 3099 - c - coordinate vector 3100 3101 Note: 3102 The coordinates of ghost points can be set using DMSetCoordinates() 3103 followed by DMGetCoordinatesLocal(). This is intended to enable the 3104 setting of ghost coordinates outside of the domain. 
3105 3106 Level: intermediate 3107 3108 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3109 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3110 @*/ 3111 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3112 { 3113 PetscErrorCode ierr; 3114 3115 PetscFunctionBegin; 3116 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3117 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3118 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3119 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3120 3121 dm->coordinatesLocal = c; 3122 3123 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3124 PetscFunctionReturn(0); 3125 } 3126 3127 #undef __FUNCT__ 3128 #define __FUNCT__ "DMGetCoordinates" 3129 /*@ 3130 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3131 3132 Not Collective 3133 3134 Input Parameter: 3135 . dm - the DM 3136 3137 Output Parameter: 3138 . c - global coordinate vector 3139 3140 Note: 3141 This is a borrowed reference, so the user should NOT destroy this vector 3142 3143 Each process has only the local coordinates (does NOT have the ghost coordinates). 3144 3145 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3146 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3147 3148 Level: intermediate 3149 3150 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3151 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3152 @*/ 3153 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3154 { 3155 PetscErrorCode ierr; 3156 3157 PetscFunctionBegin; 3158 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3159 PetscValidPointer(c,2); 3160 if (!dm->coordinates && dm->coordinatesLocal) { 3161 DM cdm = NULL; 3162 3163 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3164 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3165 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3166 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3167 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3168 } 3169 *c = dm->coordinates; 3170 PetscFunctionReturn(0); 3171 } 3172 3173 #undef __FUNCT__ 3174 #define __FUNCT__ "DMGetCoordinatesLocal" 3175 /*@ 3176 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3177 3178 Collective on DM 3179 3180 Input Parameter: 3181 . dm - the DM 3182 3183 Output Parameter: 3184 . c - coordinate vector 3185 3186 Note: 3187 This is a borrowed reference, so the user should NOT destroy this vector 3188 3189 Each process has the local and ghost coordinates 3190 3191 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3192 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3193 3194 Level: intermediate 3195 3196 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3197 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3198 @*/ 3199 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3200 { 3201 PetscErrorCode ierr; 3202 3203 PetscFunctionBegin; 3204 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3205 PetscValidPointer(c,2); 3206 if (!dm->coordinatesLocal && dm->coordinates) { 3207 DM cdm = NULL; 3208 3209 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3210 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3211 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3212 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3213 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3214 } 3215 *c = dm->coordinatesLocal; 3216 PetscFunctionReturn(0); 3217 } 3218 3219 #undef __FUNCT__ 3220 #define __FUNCT__ "DMGetCoordinateDM" 3221 /*@ 3222 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3223 3224 Collective on DM 3225 3226 Input Parameter: 3227 . dm - the DM 3228 3229 Output Parameter: 3230 . 
cdm - coordinate DM 3231 3232 Level: intermediate 3233 3234 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3235 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3236 @*/ 3237 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3238 { 3239 PetscErrorCode ierr; 3240 3241 PetscFunctionBegin; 3242 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3243 PetscValidPointer(cdm,2); 3244 if (!dm->coordinateDM) { 3245 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3246 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3247 } 3248 *cdm = dm->coordinateDM; 3249 PetscFunctionReturn(0); 3250 } 3251 3252 #undef __FUNCT__ 3253 #define __FUNCT__ "DMLocatePoints" 3254 /*@ 3255 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3256 3257 Not collective 3258 3259 Input Parameters: 3260 + dm - The DM 3261 - v - The Vec of points 3262 3263 Output Parameter: 3264 . cells - The local cell numbers for cells which contain the points 3265 3266 Level: developer 3267 3268 .keywords: point location, mesh 3269 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3270 @*/ 3271 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3272 { 3273 PetscErrorCode ierr; 3274 3275 PetscFunctionBegin; 3276 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3277 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3278 PetscValidPointer(cells,3); 3279 if (dm->ops->locatepoints) { 3280 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3281 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3282 PetscFunctionReturn(0); 3283 } 3284