#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
#include <petscsf.h>

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

#undef __FUNCT__
#define __FUNCT__ "DMViewFromOptions"
/*
  DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.

  Collective on DM

  Input Parameters:
+ dm         - the DM
. prefix     - prefix to use for viewing, or NULL to use the options prefix of the DM itself
- optionname - option to activate viewing

  Level: intermediate

.keywords: DM, view, options, database
.seealso: VecViewFromOptions(), MatViewFromOptions()
*/
PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
{
  PetscErrorCode    ierr;
  PetscBool         flg;
  PetscViewer       viewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  /* Fall back to the DM's own options prefix when the caller does not supply one */
  if (prefix) {
    ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  } else {
    ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  }
  if (flg) {
    /* flg is set only when the option was present; the returned viewer must be destroyed here */
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
    ierr = DMView(dm,viewer);CHKERRQ(ierr);
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. 
dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  /* With static libraries the packages DM depends on must be registered explicitly */
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start from a clean ops table; the concrete type fills it in via DMSetType() */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = NULL;
  v->ltogmapb     = NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): the bound 10 must stay in sync with the capacity check in DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;

  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
.  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
. 
-dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Drop any previously stored type name and keep a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
   VecGetDM - Gets the DM defining the data layout of the vector

   Not collective

   Input Parameter:
.  v - The Vec

   Output Parameter:
.  dm - The DM (NULL if no DM was attached with VecSetDM())

   Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Vec as a composed object under this fixed private name */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
   VecSetDM - Sets the DM defining the data layout of the vector

   Not collective

   Input Parameters:
+  v - The Vec
-  dm - The DM

   Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Must use the same key that VecGetDM() queries */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameter:
+  dm - the DM context
.  ctype - the matrix type

   Options Database:
.  -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Drop any previously stored type name and keep a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
   MatGetDM - Gets the DM defining the data layout of the matrix

   Not collective

   Input Parameter:
.  A - The Mat

   Output Parameter:
.  dm - The DM (NULL if no DM was attached with MatSetDM())

   Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Mat as a composed object under this fixed private name */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
   MatSetDM - Sets the DM defining the data layout of the matrix

   Not collective

   Input Parameters:
+  A - The Mat
-  dm - The DM (may be NULL to detach)

   Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may legitimately be NULL here (unlike VecSetDM()); composing NULL removes the attachment */
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets 
the prefix used for searching for all 276 DMDA options in the database. 277 278 Logically Collective on DMDA 279 280 Input Parameter: 281 + da - the DMDA context 282 - prefix - the prefix to prepend to all option names 283 284 Notes: 285 A hyphen (-) must NOT be given at the beginning of the prefix name. 286 The first character of all runtime options is AUTOMATICALLY the hyphen. 287 288 Level: advanced 289 290 .keywords: DMDA, set, options, prefix, database 291 292 .seealso: DMSetFromOptions() 293 @*/ 294 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 295 { 296 PetscErrorCode ierr; 297 298 PetscFunctionBegin; 299 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 300 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 301 PetscFunctionReturn(0); 302 } 303 304 #undef __FUNCT__ 305 #define __FUNCT__ "DMDestroy" 306 /*@ 307 DMDestroy - Destroys a vector packer or DMDA. 308 309 Collective on DM 310 311 Input Parameter: 312 . dm - the DM object to destroy 313 314 Level: developer 315 316 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 317 318 @*/ 319 PetscErrorCode DMDestroy(DM *dm) 320 { 321 PetscInt i, cnt = 0, f; 322 DMNamedVecLink nlink,nnext; 323 PetscErrorCode ierr; 324 325 PetscFunctionBegin; 326 if (!*dm) PetscFunctionReturn(0); 327 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 328 329 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 330 for (f = 0; f < (*dm)->numFields; ++f) { 331 ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 332 ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 333 ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 334 } 335 /* count all the circular references of DM and its contained Vecs */ 336 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 337 if ((*dm)->localin[i]) cnt++; 338 if ((*dm)->globalin[i]) cnt++; 
339 } 340 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 341 if ((*dm)->x) { 342 DM obj; 343 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 344 if (obj == *dm) cnt++; 345 } 346 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 347 if ((*dm)->x) { 348 DM obj; 349 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 350 if (obj == *dm) cnt++; 351 } 352 353 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 354 /* 355 Need this test because the dm references the vectors that 356 reference the dm, so destroying the dm calls destroy on the 357 vectors that cause another destroy on the dm 358 */ 359 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 360 ((PetscObject) (*dm))->refct = 0; 361 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 362 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 363 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 364 } 365 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 366 nnext = nlink->next; 367 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 368 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 369 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 370 ierr = PetscFree(nlink);CHKERRQ(ierr); 371 } 372 (*dm)->namedglobal = NULL; 373 374 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 375 nnext = nlink->next; 376 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 377 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 378 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 379 ierr = PetscFree(nlink);CHKERRQ(ierr); 380 } 381 (*dm)->namedlocal = NULL; 382 383 /* Destroy the list of hooks */ 384 { 385 DMCoarsenHookLink link,next; 
386 for (link=(*dm)->coarsenhook; link; link=next) { 387 next = link->next; 388 ierr = PetscFree(link);CHKERRQ(ierr); 389 } 390 (*dm)->coarsenhook = NULL; 391 } 392 { 393 DMRefineHookLink link,next; 394 for (link=(*dm)->refinehook; link; link=next) { 395 next = link->next; 396 ierr = PetscFree(link);CHKERRQ(ierr); 397 } 398 (*dm)->refinehook = NULL; 399 } 400 { 401 DMSubDomainHookLink link,next; 402 for (link=(*dm)->subdomainhook; link; link=next) { 403 next = link->next; 404 ierr = PetscFree(link);CHKERRQ(ierr); 405 } 406 (*dm)->subdomainhook = NULL; 407 } 408 { 409 DMGlobalToLocalHookLink link,next; 410 for (link=(*dm)->gtolhook; link; link=next) { 411 next = link->next; 412 ierr = PetscFree(link);CHKERRQ(ierr); 413 } 414 (*dm)->gtolhook = NULL; 415 } 416 /* Destroy the work arrays */ 417 { 418 DMWorkLink link,next; 419 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 420 for (link=(*dm)->workin; link; link=next) { 421 next = link->next; 422 ierr = PetscFree(link->mem);CHKERRQ(ierr); 423 ierr = PetscFree(link);CHKERRQ(ierr); 424 } 425 (*dm)->workin = NULL; 426 } 427 428 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 429 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 430 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 431 432 if ((*dm)->ctx && (*dm)->ctxdestroy) { 433 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 434 } 435 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 436 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 437 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 438 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 439 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 440 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 441 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 442 443 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 444 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 445 ierr 
= PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 446 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 447 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 448 449 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 450 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 451 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 452 453 for (f = 0; f < (*dm)->numFields; ++f) { 454 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 455 } 456 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 457 /* if memory was published with AMS then destroy it */ 458 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 459 460 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 461 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 462 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 463 PetscFunctionReturn(0); 464 } 465 466 #undef __FUNCT__ 467 #define __FUNCT__ "DMSetUp" 468 /*@ 469 DMSetUp - sets up the data structures inside a DM object 470 471 Collective on DM 472 473 Input Parameter: 474 . dm - the DM object to setup 475 476 Level: developer 477 478 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 479 480 @*/ 481 PetscErrorCode DMSetUp(DM dm) 482 { 483 PetscErrorCode ierr; 484 485 PetscFunctionBegin; 486 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 487 if (dm->setupcalled) PetscFunctionReturn(0); 488 if (dm->ops->setup) { 489 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 490 } 491 dm->setupcalled = PETSC_TRUE; 492 PetscFunctionReturn(0); 493 } 494 495 #undef __FUNCT__ 496 #define __FUNCT__ "DMSetFromOptions" 497 /*@ 498 DMSetFromOptions - sets parameters in a DM from the options database 499 500 Collective on DM 501 502 Input Parameter: 503 . dm - the DM object to set options for 504 505 Options Database: 506 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 507 . 
-dm_vec_type <type> type of vector to create inside DM 508 . -dm_mat_type <type> type of matrix to create inside DM 509 - -dm_coloring_type <global or ghosted> 510 511 Level: developer 512 513 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 514 515 @*/ 516 PetscErrorCode DMSetFromOptions(DM dm) 517 { 518 char typeName[256] = MATAIJ; 519 PetscBool flg; 520 PetscErrorCode ierr; 521 522 PetscFunctionBegin; 523 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 524 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 525 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 526 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 527 if (flg) { 528 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 529 } 530 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 531 if (flg) { 532 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 533 } 534 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 535 if (dm->ops->setfromoptions) { 536 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 537 } 538 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 539 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 540 ierr = PetscOptionsEnd();CHKERRQ(ierr); 541 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 542 PetscFunctionReturn(0); 543 } 544 545 #undef __FUNCT__ 546 #define __FUNCT__ "DMView" 547 /*@C 548 DMView - Views a vector packer or DMDA. 

    Collective on DM

    Input Parameter:
+   dm - the DM object to view
-   v - the viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    /* Default to stdout on the DM's communicator when no viewer is supplied */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary viewers get a classid marker followed by the type name padded to a
       fixed 256 bytes, so a reader can dispatch on the type before the type-specific data */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Dispatch to the type-specific implementation */
  ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
. 
dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Dispatch to the type-specific implementation */
  ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Build the mapping lazily on first request and cache it in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Derive the mapping from the sections: each local dof of a point maps to
         the contiguous run of global indices starting at the point's global offset */
      PetscInt *ltog;
      PetscInt  pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Build lazily and cache in dm->ltogmapb */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* With block size 1 the blocked mapping is identical to the scalar one;
         reuse it and take an extra reference since both slots now point at it */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
. 
bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs is initialized to 1 in DMCreate(); a value < 1 means it was never set up */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch on the coarse DM (dm1) */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch on the coarse DM (dm1) */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   matype - either MATAIJ or MATBAIJ

    Output Parameter:
. 
coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Coloring is optional for a DM type; report unsupported rather than dereferencing NULL */
  if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix"
/*@C
    DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
            any type which inherits from one of these (such as MATAIJ)

    Output Parameter:
.   mat - the empty Jacobian

    Level: beginner

    Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
       do not need to do it yourself.

       By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
       the nonzero pattern call DMDASetMatPreallocateOnly()

       For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
       internally by PETSc.

       For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
       the indices for the global numbering for DMDAs which is complicated.
903 904 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 905 906 @*/ 907 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 908 { 909 PetscErrorCode ierr; 910 911 PetscFunctionBegin; 912 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 913 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 914 ierr = MatInitializePackage();CHKERRQ(ierr); 915 #endif 916 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 917 PetscValidPointer(mat,3); 918 if (dm->mattype) { 919 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 920 } else { 921 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 922 } 923 PetscFunctionReturn(0); 924 } 925 926 #undef __FUNCT__ 927 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 928 /*@ 929 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 930 preallocated but the nonzero structure and zero values will not be set. 931 932 Logically Collective on DMDA 933 934 Input Parameter: 935 + dm - the DM 936 - only - PETSC_TRUE if only want preallocation 937 938 Level: developer 939 .seealso DMCreateMatrix() 940 @*/ 941 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 942 { 943 PetscFunctionBegin; 944 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 945 dm->prealloc_only = only; 946 PetscFunctionReturn(0); 947 } 948 949 #undef __FUNCT__ 950 #define __FUNCT__ "DMGetWorkArray" 951 /*@C 952 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 953 954 Not Collective 955 956 Input Parameters: 957 + dm - the DM object 958 . count - The minium size 959 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 960 961 Output Parameter: 962 . 
array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free list (workin) when available, otherwise allocate a new one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the cached buffer only when the request exceeds its current capacity */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list (workout); DMRestoreWorkArray() moves it back */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type  (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. 
array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link owning this memory and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was not on the checked-out list: caller error */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetNullSpaceConstructor"
/* Registers a callback that builds the null space for the given field.
   The constructor table has a fixed capacity of 10 entries, matching the
   initialization loop in DMCreate(). */
PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
  dm->nullspaceConstructors[field] = nullsp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers see well-defined values on any return path */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    /* First pass: count the unconstrained dofs of each field on locally owned points
       (gdof > 0 identifies points owned by this process in the global section) */
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: record the global indices, reusing fieldSizes as per-field write cursors */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: each IS takes ownership of its index array */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldDecomposition"
/*@C
  DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
                          corresponding to different fields: each IS contains the global indices of the dofs of the
                          corresponding field. The optional list of DMs define the DM for each subproblem.
                          Generalizes DMCreateFieldIS().

  Not collective

  Input Parameter:
. 
dm - the DM object 1171 1172 Output Parameters: 1173 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1174 . namelist - The name for each field (or NULL if not requested) 1175 . islist - The global indices for each field (or NULL if not requested) 1176 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1177 1178 Level: intermediate 1179 1180 Notes: 1181 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1182 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1183 and all of the arrays should be freed with PetscFree(). 1184 1185 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1186 @*/ 1187 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1188 { 1189 PetscErrorCode ierr; 1190 1191 PetscFunctionBegin; 1192 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1193 if (len) { 1194 PetscValidPointer(len,2); 1195 *len = 0; 1196 } 1197 if (namelist) { 1198 PetscValidPointer(namelist,3); 1199 *namelist = 0; 1200 } 1201 if (islist) { 1202 PetscValidPointer(islist,4); 1203 *islist = 0; 1204 } 1205 if (dmlist) { 1206 PetscValidPointer(dmlist,5); 1207 *dmlist = 0; 1208 } 1209 /* 1210 Is it a good idea to apply the following check across all impls? 1211 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1212 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1213 */ 1214 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1215 if (!dm->ops->createfielddecomposition) { 1216 PetscSection section; 1217 PetscInt numFields, f; 1218 1219 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1220 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1221 if (section && numFields && dm->ops->createsubdm) { 1222 *len = numFields; 1223 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1224 for (f = 0; f < numFields; ++f) { 1225 const char *fieldName; 1226 1227 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1228 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1229 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1230 } 1231 } else { 1232 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1233 /* By default there are no DMs associated with subproblems. */ 1234 if (dmlist) *dmlist = NULL; 1235 } 1236 } else { 1237 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1238 } 1239 PetscFunctionReturn(0); 1240 } 1241 1242 #undef __FUNCT__ 1243 #define __FUNCT__ "DMCreateSubDM" 1244 /*@C 1245 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1246 The fields are defined by DMCreateFieldIS(). 1247 1248 Not collective 1249 1250 Input Parameters: 1251 + dm - the DM object 1252 . numFields - number of fields in this subproblem 1253 - len - The number of subproblems in the decomposition (or NULL if not requested) 1254 1255 Output Parameters: 1256 . 
is - The global indices for the subproblem 1257 - dm - The DM for the subproblem 1258 1259 Level: intermediate 1260 1261 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1262 @*/ 1263 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1264 { 1265 PetscErrorCode ierr; 1266 1267 PetscFunctionBegin; 1268 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1269 PetscValidPointer(fields,3); 1270 if (is) PetscValidPointer(is,4); 1271 if (subdm) PetscValidPointer(subdm,5); 1272 if (dm->ops->createsubdm) { 1273 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1274 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1275 PetscFunctionReturn(0); 1276 } 1277 1278 1279 #undef __FUNCT__ 1280 #define __FUNCT__ "DMCreateDomainDecomposition" 1281 /*@C 1282 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1283 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1284 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1285 define a nonoverlapping covering, while outer subdomains can overlap. 1286 The optional list of DMs define the DM for each subproblem. 1287 1288 Not collective 1289 1290 Input Parameter: 1291 . dm - the DM object 1292 1293 Output Parameters: 1294 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1295 . namelist - The name for each subdomain (or NULL if not requested) 1296 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1297 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1298 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1299 1300 Level: intermediate 1301 1302 Notes: 1303 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1304 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1305 and all of the arrays should be freed with PetscFree(). 1306 1307 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1308 @*/ 1309 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1310 { 1311 PetscErrorCode ierr; 1312 DMSubDomainHookLink link; 1313 PetscInt i,l; 1314 1315 PetscFunctionBegin; 1316 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1317 if (len) {PetscValidPointer(len,2); *len = 0;} 1318 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1319 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1320 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1321 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1322 /* 1323 Is it a good idea to apply the following check across all impls? 1324 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1325 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1326 */ 1327 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1328 if (dm->ops->createdomaindecomposition) { 1329 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1330 /* copy subdomain hooks and context over to the subdomain DMs */ 1331 if (dmlist) { 1332 for (i = 0; i < l; i++) { 1333 for (link=dm->subdomainhook; link; link=link->next) { 1334 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1335 } 1336 (*dmlist)[i]->ctx = dm->ctx; 1337 } 1338 } 1339 if (len) *len = l; 1340 } 1341 PetscFunctionReturn(0); 1342 } 1343 1344 1345 #undef __FUNCT__ 1346 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1347 /*@C 1348 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1349 1350 Not collective 1351 1352 Input Parameters: 1353 + dm - the DM object 1354 . n - the number of subdomain scatters 1355 - subdms - the local subdomains 1356 1357 Output Parameters: 1358 + n - the number of scatters returned 1359 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1360 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1361 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1362 1363 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1364 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1365 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1366 solution and residual data. 
1367 1368 Level: developer 1369 1370 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1371 @*/ 1372 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1373 { 1374 PetscErrorCode ierr; 1375 1376 PetscFunctionBegin; 1377 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1378 PetscValidPointer(subdms,3); 1379 if (dm->ops->createddscatters) { 1380 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1381 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1382 PetscFunctionReturn(0); 1383 } 1384 1385 #undef __FUNCT__ 1386 #define __FUNCT__ "DMRefine" 1387 /*@ 1388 DMRefine - Refines a DM object 1389 1390 Collective on DM 1391 1392 Input Parameter: 1393 + dm - the DM object 1394 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1395 1396 Output Parameter: 1397 . 
dmf - the refined DM, or NULL 1398 1399 Note: If no refinement was done, the return value is NULL 1400 1401 Level: developer 1402 1403 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1404 @*/ 1405 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1406 { 1407 PetscErrorCode ierr; 1408 DMRefineHookLink link; 1409 1410 PetscFunctionBegin; 1411 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1412 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1413 if (*dmf) { 1414 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1415 1416 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1417 1418 (*dmf)->ctx = dm->ctx; 1419 (*dmf)->leveldown = dm->leveldown; 1420 (*dmf)->levelup = dm->levelup + 1; 1421 1422 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1423 for (link=dm->refinehook; link; link=link->next) { 1424 if (link->refinehook) { 1425 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1426 } 1427 } 1428 } 1429 PetscFunctionReturn(0); 1430 } 1431 1432 #undef __FUNCT__ 1433 #define __FUNCT__ "DMRefineHookAdd" 1434 /*@C 1435 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1436 1437 Logically Collective 1438 1439 Input Arguments: 1440 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1441 . refinehook - function to run when setting up a coarser level 1442 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1443 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1444 1445 Calling sequence of refinehook: 1446 $ refinehook(DM coarse,DM fine,void *ctx); 1447 1448 + coarse - coarse level DM 1449 . 
fine - fine level DM to interpolate problem to 1450 - ctx - optional user-defined function context 1451 1452 Calling sequence for interphook: 1453 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1454 1455 + coarse - coarse level DM 1456 . interp - matrix interpolating a coarse-level solution to the finer grid 1457 . fine - fine level DM to update 1458 - ctx - optional user-defined function context 1459 1460 Level: advanced 1461 1462 Notes: 1463 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1464 1465 If this function is called multiple times, the hooks will be run in the order they are added. 1466 1467 This function is currently not available from Fortran. 1468 1469 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1470 @*/ 1471 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1472 { 1473 PetscErrorCode ierr; 1474 DMRefineHookLink link,*p; 1475 1476 PetscFunctionBegin; 1477 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1478 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1479 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1480 link->refinehook = refinehook; 1481 link->interphook = interphook; 1482 link->ctx = ctx; 1483 link->next = NULL; 1484 *p = link; 1485 PetscFunctionReturn(0); 1486 } 1487 1488 #undef __FUNCT__ 1489 #define __FUNCT__ "DMInterpolate" 1490 /*@ 1491 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1492 1493 Collective if any hooks are 1494 1495 Input Arguments: 1496 + coarse - coarser DM to use as a base 1497 . 
restrct - interpolation matrix, apply using MatInterpolate() 1498 - fine - finer DM to update 1499 1500 Level: developer 1501 1502 .seealso: DMRefineHookAdd(), MatInterpolate() 1503 @*/ 1504 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1505 { 1506 PetscErrorCode ierr; 1507 DMRefineHookLink link; 1508 1509 PetscFunctionBegin; 1510 for (link=fine->refinehook; link; link=link->next) { 1511 if (link->interphook) { 1512 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1513 } 1514 } 1515 PetscFunctionReturn(0); 1516 } 1517 1518 #undef __FUNCT__ 1519 #define __FUNCT__ "DMGetRefineLevel" 1520 /*@ 1521 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1522 1523 Not Collective 1524 1525 Input Parameter: 1526 . dm - the DM object 1527 1528 Output Parameter: 1529 . level - number of refinements 1530 1531 Level: developer 1532 1533 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1534 1535 @*/ 1536 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1537 { 1538 PetscFunctionBegin; 1539 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1540 *level = dm->levelup; 1541 PetscFunctionReturn(0); 1542 } 1543 1544 #undef __FUNCT__ 1545 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1546 /*@C 1547 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1548 1549 Logically Collective 1550 1551 Input Arguments: 1552 + dm - the DM 1553 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1554 . endhook - function to run after DMGlobalToLocalEnd() has completed 1555 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1556 1557 Calling sequence for beginhook: 1558 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1559 1560 + dm - global DM 1561 . g - global vector 1562 . mode - mode 1563 . 
l - local vector 1564 - ctx - optional user-defined function context 1565 1566 1567 Calling sequence for endhook: 1568 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1569 1570 + global - global DM 1571 - ctx - optional user-defined function context 1572 1573 Level: advanced 1574 1575 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1576 @*/ 1577 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1578 { 1579 PetscErrorCode ierr; 1580 DMGlobalToLocalHookLink link,*p; 1581 1582 PetscFunctionBegin; 1583 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1584 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1585 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1586 link->beginhook = beginhook; 1587 link->endhook = endhook; 1588 link->ctx = ctx; 1589 link->next = NULL; 1590 *p = link; 1591 PetscFunctionReturn(0); 1592 } 1593 1594 #undef __FUNCT__ 1595 #define __FUNCT__ "DMGlobalToLocalBegin" 1596 /*@ 1597 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1598 1599 Neighbor-wise Collective on DM 1600 1601 Input Parameters: 1602 + dm - the DM object 1603 . g - the global vector 1604 . 
mode - INSERT_VALUES or ADD_VALUES 1605 - l - the local vector 1606 1607 1608 Level: beginner 1609 1610 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1611 1612 @*/ 1613 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1614 { 1615 PetscSF sf; 1616 PetscErrorCode ierr; 1617 DMGlobalToLocalHookLink link; 1618 1619 PetscFunctionBegin; 1620 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1621 for (link=dm->gtolhook; link; link=link->next) { 1622 if (link->beginhook) { 1623 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1624 } 1625 } 1626 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1627 if (sf) { 1628 PetscScalar *lArray, *gArray; 1629 1630 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1631 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1632 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1633 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1634 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1635 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1636 } else { 1637 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1638 } 1639 PetscFunctionReturn(0); 1640 } 1641 1642 #undef __FUNCT__ 1643 #define __FUNCT__ "DMGlobalToLocalEnd" 1644 /*@ 1645 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1646 1647 Neighbor-wise Collective on DM 1648 1649 Input Parameters: 1650 + dm - the DM object 1651 . g - the global vector 1652 . 
mode - INSERT_VALUES or ADD_VALUES 1653 - l - the local vector 1654 1655 1656 Level: beginner 1657 1658 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1659 1660 @*/ 1661 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1662 { 1663 PetscSF sf; 1664 PetscErrorCode ierr; 1665 PetscScalar *lArray, *gArray; 1666 DMGlobalToLocalHookLink link; 1667 1668 PetscFunctionBegin; 1669 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1670 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1671 if (sf) { 1672 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1673 1674 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1675 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1676 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1677 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1678 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1679 } else { 1680 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1681 } 1682 for (link=dm->gtolhook; link; link=link->next) { 1683 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1684 } 1685 PetscFunctionReturn(0); 1686 } 1687 1688 #undef __FUNCT__ 1689 #define __FUNCT__ "DMLocalToGlobalBegin" 1690 /*@ 1691 DMLocalToGlobalBegin - updates global vectors from local vectors 1692 1693 Neighbor-wise Collective on DM 1694 1695 Input Parameters: 1696 + dm - the DM object 1697 . l - the local vector 1698 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1699 base point. 1700 - - the global vector 1701 1702 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1703 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1704 global array to the final global array with VecAXPY(). 1705 1706 Level: beginner 1707 1708 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1709 1710 @*/ 1711 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1712 { 1713 PetscSF sf; 1714 PetscErrorCode ierr; 1715 1716 PetscFunctionBegin; 1717 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1718 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1719 if (sf) { 1720 MPI_Op op; 1721 PetscScalar *lArray, *gArray; 1722 1723 switch (mode) { 1724 case INSERT_VALUES: 1725 case INSERT_ALL_VALUES: 1726 op = MPIU_REPLACE; break; 1727 case ADD_VALUES: 1728 case ADD_ALL_VALUES: 1729 op = MPI_SUM; break; 1730 default: 1731 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1732 } 1733 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1734 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1735 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1736 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1737 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1738 } else { 1739 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1740 } 1741 PetscFunctionReturn(0); 1742 } 1743 1744 #undef __FUNCT__ 1745 #define __FUNCT__ "DMLocalToGlobalEnd" 1746 /*@ 1747 DMLocalToGlobalEnd - updates global vectors from local vectors 1748 1749 Neighbor-wise Collective on DM 1750 1751 Input Parameters: 1752 + dm - the DM object 1753 . l - the local vector 1754 . 
mode - INSERT_VALUES or ADD_VALUES 1755 - g - the global vector 1756 1757 1758 Level: beginner 1759 1760 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1761 1762 @*/ 1763 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1764 { 1765 PetscSF sf; 1766 PetscErrorCode ierr; 1767 1768 PetscFunctionBegin; 1769 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1770 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1771 if (sf) { 1772 MPI_Op op; 1773 PetscScalar *lArray, *gArray; 1774 1775 switch (mode) { 1776 case INSERT_VALUES: 1777 case INSERT_ALL_VALUES: 1778 op = MPIU_REPLACE; break; 1779 case ADD_VALUES: 1780 case ADD_ALL_VALUES: 1781 op = MPI_SUM; break; 1782 default: 1783 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1784 } 1785 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1786 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1787 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1788 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1789 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1790 } else { 1791 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1792 } 1793 PetscFunctionReturn(0); 1794 } 1795 1796 #undef __FUNCT__ 1797 #define __FUNCT__ "DMCoarsen" 1798 /*@ 1799 DMCoarsen - Coarsens a DM object 1800 1801 Collective on DM 1802 1803 Input Parameter: 1804 + dm - the DM object 1805 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1806 1807 Output Parameter: 1808 . 
dmc - the coarsened DM 1809 1810 Level: developer 1811 1812 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1813 1814 @*/ 1815 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1816 { 1817 PetscErrorCode ierr; 1818 DMCoarsenHookLink link; 1819 1820 PetscFunctionBegin; 1821 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1822 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1823 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1824 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1825 (*dmc)->ctx = dm->ctx; 1826 (*dmc)->levelup = dm->levelup; 1827 (*dmc)->leveldown = dm->leveldown + 1; 1828 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1829 for (link=dm->coarsenhook; link; link=link->next) { 1830 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1831 } 1832 PetscFunctionReturn(0); 1833 } 1834 1835 #undef __FUNCT__ 1836 #define __FUNCT__ "DMCoarsenHookAdd" 1837 /*@C 1838 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1839 1840 Logically Collective 1841 1842 Input Arguments: 1843 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1844 . coarsenhook - function to run when setting up a coarser level 1845 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1846 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1847 1848 Calling sequence of coarsenhook: 1849 $ coarsenhook(DM fine,DM coarse,void *ctx); 1850 1851 + fine - fine level DM 1852 . coarse - coarse level DM to restrict problem to 1853 - ctx - optional user-defined function context 1854 1855 Calling sequence for restricthook: 1856 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1857 1858 + fine - fine level DM 1859 . 
mrestrict - matrix restricting a fine-level solution to the coarse grid 1860 . rscale - scaling vector for restriction 1861 . inject - matrix restricting by injection 1862 . coarse - coarse level DM to update 1863 - ctx - optional user-defined function context 1864 1865 Level: advanced 1866 1867 Notes: 1868 This function is only needed if auxiliary data needs to be set up on coarse grids. 1869 1870 If this function is called multiple times, the hooks will be run in the order they are added. 1871 1872 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1873 extract the finest level information from its context (instead of from the SNES). 1874 1875 This function is currently not available from Fortran. 1876 1877 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1878 @*/ 1879 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1880 { 1881 PetscErrorCode ierr; 1882 DMCoarsenHookLink link,*p; 1883 1884 PetscFunctionBegin; 1885 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1886 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1887 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1888 link->coarsenhook = coarsenhook; 1889 link->restricthook = restricthook; 1890 link->ctx = ctx; 1891 link->next = NULL; 1892 *p = link; 1893 PetscFunctionReturn(0); 1894 } 1895 1896 #undef __FUNCT__ 1897 #define __FUNCT__ "DMRestrict" 1898 /*@ 1899 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1900 1901 Collective if any hooks are 1902 1903 Input Arguments: 1904 + fine - finer DM to use as a base 1905 . restrct - restriction matrix, apply using MatRestrict() 1906 . 
inject - injection matrix, also use MatRestrict() 1907 - coarse - coarer DM to update 1908 1909 Level: developer 1910 1911 .seealso: DMCoarsenHookAdd(), MatRestrict() 1912 @*/ 1913 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1914 { 1915 PetscErrorCode ierr; 1916 DMCoarsenHookLink link; 1917 1918 PetscFunctionBegin; 1919 for (link=fine->coarsenhook; link; link=link->next) { 1920 if (link->restricthook) { 1921 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 1922 } 1923 } 1924 PetscFunctionReturn(0); 1925 } 1926 1927 #undef __FUNCT__ 1928 #define __FUNCT__ "DMSubDomainHookAdd" 1929 /*@C 1930 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 1931 1932 Logically Collective 1933 1934 Input Arguments: 1935 + global - global DM 1936 . ddhook - function to run to pass data to the decomposition DM upon its creation 1937 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1938 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1939 1940 1941 Calling sequence for ddhook: 1942 $ ddhook(DM global,DM block,void *ctx) 1943 1944 + global - global DM 1945 . block - block DM 1946 - ctx - optional user-defined function context 1947 1948 Calling sequence for restricthook: 1949 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 1950 1951 + global - global DM 1952 . out - scatter to the outer (with ghost and overlap points) block vector 1953 . in - scatter to block vector values only owned locally 1954 . block - block DM 1955 - ctx - optional user-defined function context 1956 1957 Level: advanced 1958 1959 Notes: 1960 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 1961 1962 If this function is called multiple times, the hooks will be run in the order they are added. 
1963 1964 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1965 extract the global information from its context (instead of from the SNES). 1966 1967 This function is currently not available from Fortran. 1968 1969 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1970 @*/ 1971 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1972 { 1973 PetscErrorCode ierr; 1974 DMSubDomainHookLink link,*p; 1975 1976 PetscFunctionBegin; 1977 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1978 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1979 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 1980 link->restricthook = restricthook; 1981 link->ddhook = ddhook; 1982 link->ctx = ctx; 1983 link->next = NULL; 1984 *p = link; 1985 PetscFunctionReturn(0); 1986 } 1987 1988 #undef __FUNCT__ 1989 #define __FUNCT__ "DMSubDomainRestrict" 1990 /*@ 1991 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 1992 1993 Collective if any hooks are 1994 1995 Input Arguments: 1996 + fine - finer DM to use as a base 1997 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 1998 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 1999 - coarse - coarer DM to update 2000 2001 Level: developer 2002 2003 .seealso: DMCoarsenHookAdd(), MatRestrict() 2004 @*/ 2005 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2006 { 2007 PetscErrorCode ierr; 2008 DMSubDomainHookLink link; 2009 2010 PetscFunctionBegin; 2011 for (link=global->subdomainhook; link; link=link->next) { 2012 if (link->restricthook) { 2013 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2014 } 2015 } 2016 PetscFunctionReturn(0); 2017 } 2018 2019 #undef __FUNCT__ 2020 #define __FUNCT__ "DMGetCoarsenLevel" 2021 /*@ 2022 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2023 2024 Not Collective 2025 2026 Input Parameter: 2027 . dm - the DM object 2028 2029 Output Parameter: 2030 . level - number of coarsenings 2031 2032 Level: developer 2033 2034 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2035 2036 @*/ 2037 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2038 { 2039 PetscFunctionBegin; 2040 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2041 *level = dm->leveldown; 2042 PetscFunctionReturn(0); 2043 } 2044 2045 2046 2047 #undef __FUNCT__ 2048 #define __FUNCT__ "DMRefineHierarchy" 2049 /*@C 2050 DMRefineHierarchy - Refines a DM object, all levels at once 2051 2052 Collective on DM 2053 2054 Input Parameter: 2055 + dm - the DM object 2056 - nlevels - the number of levels of refinement 2057 2058 Output Parameter: 2059 . 
dmf - the refined DM hierarchy 2060 2061 Level: developer 2062 2063 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2064 2065 @*/ 2066 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2067 { 2068 PetscErrorCode ierr; 2069 2070 PetscFunctionBegin; 2071 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2072 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2073 if (nlevels == 0) PetscFunctionReturn(0); 2074 if (dm->ops->refinehierarchy) { 2075 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2076 } else if (dm->ops->refine) { 2077 PetscInt i; 2078 2079 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2080 for (i=1; i<nlevels; i++) { 2081 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2082 } 2083 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2084 PetscFunctionReturn(0); 2085 } 2086 2087 #undef __FUNCT__ 2088 #define __FUNCT__ "DMCoarsenHierarchy" 2089 /*@C 2090 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2091 2092 Collective on DM 2093 2094 Input Parameter: 2095 + dm - the DM object 2096 - nlevels - the number of levels of coarsening 2097 2098 Output Parameter: 2099 . 
dmc - the coarsened DM hierarchy 2100 2101 Level: developer 2102 2103 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2104 2105 @*/ 2106 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2107 { 2108 PetscErrorCode ierr; 2109 2110 PetscFunctionBegin; 2111 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2112 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2113 if (nlevels == 0) PetscFunctionReturn(0); 2114 PetscValidPointer(dmc,3); 2115 if (dm->ops->coarsenhierarchy) { 2116 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2117 } else if (dm->ops->coarsen) { 2118 PetscInt i; 2119 2120 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2121 for (i=1; i<nlevels; i++) { 2122 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2123 } 2124 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2125 PetscFunctionReturn(0); 2126 } 2127 2128 #undef __FUNCT__ 2129 #define __FUNCT__ "DMCreateAggregates" 2130 /*@ 2131 DMCreateAggregates - Gets the aggregates that map between 2132 grids associated with two DMs. 2133 2134 Collective on DM 2135 2136 Input Parameters: 2137 + dmc - the coarse grid DM 2138 - dmf - the fine grid DM 2139 2140 Output Parameters: 2141 . 
rest - the restriction matrix (transpose of the projection matrix) 2142 2143 Level: intermediate 2144 2145 .keywords: interpolation, restriction, multigrid 2146 2147 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2148 @*/ 2149 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2150 { 2151 PetscErrorCode ierr; 2152 2153 PetscFunctionBegin; 2154 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2155 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2156 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2157 PetscFunctionReturn(0); 2158 } 2159 2160 #undef __FUNCT__ 2161 #define __FUNCT__ "DMSetApplicationContextDestroy" 2162 /*@C 2163 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2164 2165 Not Collective 2166 2167 Input Parameters: 2168 + dm - the DM object 2169 - destroy - the destroy function 2170 2171 Level: intermediate 2172 2173 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2174 2175 @*/ 2176 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2177 { 2178 PetscFunctionBegin; 2179 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2180 dm->ctxdestroy = destroy; 2181 PetscFunctionReturn(0); 2182 } 2183 2184 #undef __FUNCT__ 2185 #define __FUNCT__ "DMSetApplicationContext" 2186 /*@ 2187 DMSetApplicationContext - Set a user context into a DM object 2188 2189 Not Collective 2190 2191 Input Parameters: 2192 + dm - the DM object 2193 - ctx - the user context 2194 2195 Level: intermediate 2196 2197 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2198 2199 @*/ 2200 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2201 { 2202 PetscFunctionBegin; 2203 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2204 dm->ctx = ctx; 2205 PetscFunctionReturn(0); 2206 } 2207 
2208 #undef __FUNCT__ 2209 #define __FUNCT__ "DMGetApplicationContext" 2210 /*@ 2211 DMGetApplicationContext - Gets a user context from a DM object 2212 2213 Not Collective 2214 2215 Input Parameter: 2216 . dm - the DM object 2217 2218 Output Parameter: 2219 . ctx - the user context 2220 2221 Level: intermediate 2222 2223 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2224 2225 @*/ 2226 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2227 { 2228 PetscFunctionBegin; 2229 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2230 *(void**)ctx = dm->ctx; 2231 PetscFunctionReturn(0); 2232 } 2233 2234 #undef __FUNCT__ 2235 #define __FUNCT__ "DMSetVariableBounds" 2236 /*@C 2237 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2238 2239 Logically Collective on DM 2240 2241 Input Parameter: 2242 + dm - the DM object 2243 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2244 2245 Level: intermediate 2246 2247 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2248 DMSetJacobian() 2249 2250 @*/ 2251 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2252 { 2253 PetscFunctionBegin; 2254 dm->ops->computevariablebounds = f; 2255 PetscFunctionReturn(0); 2256 } 2257 2258 #undef __FUNCT__ 2259 #define __FUNCT__ "DMHasVariableBounds" 2260 /*@ 2261 DMHasVariableBounds - does the DM object have a variable bounds function? 2262 2263 Not Collective 2264 2265 Input Parameter: 2266 . dm - the DM object to destroy 2267 2268 Output Parameter: 2269 . 
flg - PETSC_TRUE if the variable bounds function exists 2270 2271 Level: developer 2272 2273 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2274 2275 @*/ 2276 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2277 { 2278 PetscFunctionBegin; 2279 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2280 PetscFunctionReturn(0); 2281 } 2282 2283 #undef __FUNCT__ 2284 #define __FUNCT__ "DMComputeVariableBounds" 2285 /*@C 2286 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2287 2288 Logically Collective on DM 2289 2290 Input Parameters: 2291 + dm - the DM object to destroy 2292 - x - current solution at which the bounds are computed 2293 2294 Output parameters: 2295 + xl - lower bound 2296 - xu - upper bound 2297 2298 Level: intermediate 2299 2300 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2301 2302 @*/ 2303 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2304 { 2305 PetscErrorCode ierr; 2306 2307 PetscFunctionBegin; 2308 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2309 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2310 if (dm->ops->computevariablebounds) { 2311 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2312 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2313 PetscFunctionReturn(0); 2314 } 2315 2316 #undef __FUNCT__ 2317 #define __FUNCT__ "DMHasColoring" 2318 /*@ 2319 DMHasColoring - does the DM object have a method of providing a coloring? 2320 2321 Not Collective 2322 2323 Input Parameter: 2324 . dm - the DM object 2325 2326 Output Parameter: 2327 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2328 2329 Level: developer 2330 2331 .seealso DMHasFunction(), DMCreateColoring() 2332 2333 @*/ 2334 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2335 { 2336 PetscFunctionBegin; 2337 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2338 PetscFunctionReturn(0); 2339 } 2340 2341 #undef __FUNCT__ 2342 #define __FUNCT__ "DMSetVec" 2343 /*@C 2344 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2345 2346 Collective on DM 2347 2348 Input Parameter: 2349 + dm - the DM object 2350 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2351 2352 Level: developer 2353 2354 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2355 2356 @*/ 2357 PetscErrorCode DMSetVec(DM dm,Vec x) 2358 { 2359 PetscErrorCode ierr; 2360 2361 PetscFunctionBegin; 2362 if (x) { 2363 if (!dm->x) { 2364 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2365 } 2366 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2367 } else if (dm->x) { 2368 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2369 } 2370 PetscFunctionReturn(0); 2371 } 2372 2373 PetscFunctionList DMList = NULL; 2374 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2375 2376 #undef __FUNCT__ 2377 #define __FUNCT__ "DMSetType" 2378 /*@C 2379 DMSetType - Builds a DM, for a particular DM implementation. 2380 2381 Collective on DM 2382 2383 Input Parameters: 2384 + dm - The DM object 2385 - method - The name of the DM type 2386 2387 Options Database Key: 2388 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2389 2390 Notes: 2391 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2392 2393 Level: intermediate 2394 2395 .keywords: DM, set, type 2396 .seealso: DMGetType(), DMCreate() 2397 @*/ 2398 PetscErrorCode DMSetType(DM dm, DMType method) 2399 { 2400 PetscErrorCode (*r)(DM); 2401 PetscBool match; 2402 PetscErrorCode ierr; 2403 2404 PetscFunctionBegin; 2405 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2406 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2407 if (match) PetscFunctionReturn(0); 2408 2409 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2410 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2411 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2412 2413 if (dm->ops->destroy) { 2414 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2415 dm->ops->destroy = NULL; 2416 } 2417 ierr = (*r)(dm);CHKERRQ(ierr); 2418 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2419 PetscFunctionReturn(0); 2420 } 2421 2422 #undef __FUNCT__ 2423 #define __FUNCT__ "DMGetType" 2424 /*@C 2425 DMGetType - Gets the DM type name (as a string) from the DM. 2426 2427 Not Collective 2428 2429 Input Parameter: 2430 . dm - The DM 2431 2432 Output Parameter: 2433 . type - The DM type name 2434 2435 Level: intermediate 2436 2437 .keywords: DM, get, type, name 2438 .seealso: DMSetType(), DMCreate() 2439 @*/ 2440 PetscErrorCode DMGetType(DM dm, DMType *type) 2441 { 2442 PetscErrorCode ierr; 2443 2444 PetscFunctionBegin; 2445 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2446 PetscValidCharPointer(type,2); 2447 if (!DMRegisterAllCalled) { 2448 ierr = DMRegisterAll();CHKERRQ(ierr); 2449 } 2450 *type = ((PetscObject)dm)->type_name; 2451 PetscFunctionReturn(0); 2452 } 2453 2454 #undef __FUNCT__ 2455 #define __FUNCT__ "DMConvert" 2456 /*@C 2457 DMConvert - Converts a DM to another DM, either of the same or different type. 
2458 2459 Collective on DM 2460 2461 Input Parameters: 2462 + dm - the DM 2463 - newtype - new DM type (use "same" for the same type) 2464 2465 Output Parameter: 2466 . M - pointer to new DM 2467 2468 Notes: 2469 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2470 the MPI communicator of the generated DM is always the same as the communicator 2471 of the input DM. 2472 2473 Level: intermediate 2474 2475 .seealso: DMCreate() 2476 @*/ 2477 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2478 { 2479 DM B; 2480 char convname[256]; 2481 PetscBool sametype, issame; 2482 PetscErrorCode ierr; 2483 2484 PetscFunctionBegin; 2485 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2486 PetscValidType(dm,1); 2487 PetscValidPointer(M,3); 2488 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2489 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2490 { 2491 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2492 2493 /* 2494 Order of precedence: 2495 1) See if a specialized converter is known to the current DM. 2496 2) See if a specialized converter is known to the desired DM class. 2497 3) See if a good general converter is registered for the desired class 2498 4) See if a good general converter is known for the current matrix. 2499 5) Use a really basic converter. 2500 */ 2501 2502 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2503 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2504 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2505 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2506 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2507 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2508 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2509 if (conv) goto foundconv; 2510 2511 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2512 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2513 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2514 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2515 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2516 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2517 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2518 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2519 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2520 if (conv) { 2521 ierr = DMDestroy(&B);CHKERRQ(ierr); 2522 goto foundconv; 2523 } 2524 2525 #if 0 2526 /* 3) See if a good general converter is registered for the desired class */ 2527 conv = B->ops->convertfrom; 2528 ierr = DMDestroy(&B);CHKERRQ(ierr); 2529 if (conv) goto foundconv; 2530 2531 /* 4) See if a good general converter is known for the current matrix */ 2532 if (dm->ops->convert) { 2533 conv = dm->ops->convert; 2534 } 2535 if (conv) goto foundconv; 2536 #endif 2537 2538 /* 5) Use a really basic converter. 
*/ 2539 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2540 2541 foundconv: 2542 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2543 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2544 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2545 } 2546 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2547 PetscFunctionReturn(0); 2548 } 2549 2550 /*--------------------------------------------------------------------------------------------------------------------*/ 2551 2552 #undef __FUNCT__ 2553 #define __FUNCT__ "DMRegister" 2554 /*@C 2555 DMRegister - Adds a new DM component implementation 2556 2557 Not Collective 2558 2559 Input Parameters: 2560 + name - The name of a new user-defined creation routine 2561 - create_func - The creation routine itself 2562 2563 Notes: 2564 DMRegister() may be called multiple times to add several user-defined DMs 2565 2566 2567 Sample usage: 2568 .vb 2569 DMRegister("my_da", MyDMCreate); 2570 .ve 2571 2572 Then, your DM type can be chosen with the procedural interface via 2573 .vb 2574 DMCreate(MPI_Comm, DM *); 2575 DMSetType(DM,"my_da"); 2576 .ve 2577 or at runtime via the option 2578 .vb 2579 -da_type my_da 2580 .ve 2581 2582 Level: advanced 2583 2584 .keywords: DM, register 2585 .seealso: DMRegisterAll(), DMRegisterDestroy() 2586 2587 @*/ 2588 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2589 { 2590 PetscErrorCode ierr; 2591 2592 PetscFunctionBegin; 2593 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2594 PetscFunctionReturn(0); 2595 } 2596 2597 #undef __FUNCT__ 2598 #define __FUNCT__ "DMLoad" 2599 /*@C 2600 DMLoad - Loads a DM that has been stored in binary with DMView(). 
2601 2602 Collective on PetscViewer 2603 2604 Input Parameters: 2605 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2606 some related function before a call to DMLoad(). 2607 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2608 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2609 2610 Level: intermediate 2611 2612 Notes: 2613 The type is determined by the data in the file, any type set into the DM before this call is ignored. 2614 2615 Notes for advanced users: 2616 Most users should not need to know the details of the binary storage 2617 format, since DMLoad() and DMView() completely hide these details. 2618 But for anyone who's interested, the standard binary matrix storage 2619 format is 2620 .vb 2621 has not yet been determined 2622 .ve 2623 2624 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2625 @*/ 2626 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2627 { 2628 PetscErrorCode ierr; 2629 PetscBool isbinary; 2630 PetscInt classid; 2631 char type[256]; 2632 2633 PetscFunctionBegin; 2634 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2635 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2636 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 2637 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 2638 2639 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 2640 if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file"); 2641 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 2642 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 2643 if (newdm->ops->load) { 2644 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2645 } 2646 PetscFunctionReturn(0); 2647 } 2648 2649 /******************************** FEM Support **********************************/ 2650 2651 
#undef __FUNCT__ 2652 #define __FUNCT__ "DMPrintCellVector" 2653 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2654 { 2655 PetscInt f; 2656 PetscErrorCode ierr; 2657 2658 PetscFunctionBegin; 2659 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2660 for (f = 0; f < len; ++f) { 2661 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2662 } 2663 PetscFunctionReturn(0); 2664 } 2665 2666 #undef __FUNCT__ 2667 #define __FUNCT__ "DMPrintCellMatrix" 2668 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2669 { 2670 PetscInt f, g; 2671 PetscErrorCode ierr; 2672 2673 PetscFunctionBegin; 2674 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2675 for (f = 0; f < rows; ++f) { 2676 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2677 for (g = 0; g < cols; ++g) { 2678 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2679 } 2680 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2681 } 2682 PetscFunctionReturn(0); 2683 } 2684 2685 #undef __FUNCT__ 2686 #define __FUNCT__ "DMGetDefaultSection" 2687 /*@ 2688 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2689 2690 Input Parameter: 2691 . dm - The DM 2692 2693 Output Parameter: 2694 . section - The PetscSection 2695 2696 Level: intermediate 2697 2698 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2699 2700 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2701 @*/ 2702 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2703 { 2704 PetscFunctionBegin; 2705 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2706 PetscValidPointer(section, 2); 2707 *section = dm->defaultSection; 2708 PetscFunctionReturn(0); 2709 } 2710 2711 #undef __FUNCT__ 2712 #define __FUNCT__ "DMSetDefaultSection" 2713 /*@ 2714 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2715 2716 Input Parameters: 2717 + dm - The DM 2718 - section - The PetscSection 2719 2720 Level: intermediate 2721 2722 Note: Any existing Section will be destroyed 2723 2724 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2725 @*/ 2726 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) 2727 { 2728 PetscInt numFields; 2729 PetscInt f; 2730 PetscErrorCode ierr; 2731 2732 PetscFunctionBegin; 2733 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2734 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2735 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2736 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2737 dm->defaultSection = section; 2738 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 2739 if (numFields) { 2740 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 2741 for (f = 0; f < numFields; ++f) { 2742 const char *name; 2743 2744 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 2745 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 2746 } 2747 } 2748 /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). 
*/ 2749 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2750 PetscFunctionReturn(0); 2751 } 2752 2753 #undef __FUNCT__ 2754 #define __FUNCT__ "DMGetDefaultGlobalSection" 2755 /*@ 2756 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2757 2758 Collective on DM 2759 2760 Input Parameter: 2761 . dm - The DM 2762 2763 Output Parameter: 2764 . section - The PetscSection 2765 2766 Level: intermediate 2767 2768 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2769 2770 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2771 @*/ 2772 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2773 { 2774 PetscErrorCode ierr; 2775 2776 PetscFunctionBegin; 2777 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2778 PetscValidPointer(section, 2); 2779 if (!dm->defaultGlobalSection) { 2780 if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 2781 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2782 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2783 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr); 2784 } 2785 *section = dm->defaultGlobalSection; 2786 PetscFunctionReturn(0); 2787 } 2788 2789 #undef __FUNCT__ 2790 #define __FUNCT__ "DMSetDefaultGlobalSection" 2791 /*@ 2792 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2793 2794 Input Parameters: 2795 + dm - The DM 2796 - section - The PetscSection, or NULL 2797 2798 Level: intermediate 2799 2800 Note: Any existing Section will be destroyed 2801 2802 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2803 @*/ 2804 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2805 { 2806 PetscErrorCode ierr; 2807 2808 PetscFunctionBegin; 2809 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2810 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2811 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2812 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2813 dm->defaultGlobalSection = section; 2814 PetscFunctionReturn(0); 2815 } 2816 2817 #undef __FUNCT__ 2818 #define __FUNCT__ "DMGetDefaultSF" 2819 /*@ 2820 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2821 it is created from the default PetscSection layouts in the DM. 2822 2823 Input Parameter: 2824 . dm - The DM 2825 2826 Output Parameter: 2827 . sf - The PetscSF 2828 2829 Level: intermediate 2830 2831 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2832 2833 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2834 @*/ 2835 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2836 { 2837 PetscInt nroots; 2838 PetscErrorCode ierr; 2839 2840 PetscFunctionBegin; 2841 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2842 PetscValidPointer(sf, 2); 2843 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2844 if (nroots < 0) { 2845 PetscSection section, gSection; 2846 2847 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2848 if (section) { 2849 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2850 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2851 } else { 2852 *sf = NULL; 2853 PetscFunctionReturn(0); 2854 } 2855 } 2856 *sf = dm->defaultSF; 2857 PetscFunctionReturn(0); 2858 } 2859 2860 #undef __FUNCT__ 2861 #define __FUNCT__ "DMSetDefaultSF" 2862 /*@ 2863 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 2864 2865 Input Parameters: 2866 + dm - The DM 2867 - sf - The PetscSF 2868 2869 Level: intermediate 2870 2871 Note: Any previous SF is destroyed 2872 2873 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 2874 @*/ 2875 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 2876 { 2877 PetscErrorCode ierr; 2878 2879 PetscFunctionBegin; 2880 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2881 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 2882 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 2883 dm->defaultSF = sf; 2884 PetscFunctionReturn(0); 2885 } 2886 2887 #undef __FUNCT__ 2888 #define __FUNCT__ "DMCreateDefaultSF" 2889 /*@C 2890 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 2891 describing the data layout. 2892 2893 Input Parameters: 2894 + dm - The DM 2895 . 
localSection - PetscSection describing the local data layout 2896 - globalSection - PetscSection describing the global data layout 2897 2898 Level: intermediate 2899 2900 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 2901 @*/ 2902 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 2903 { 2904 MPI_Comm comm; 2905 PetscLayout layout; 2906 const PetscInt *ranges; 2907 PetscInt *local; 2908 PetscSFNode *remote; 2909 PetscInt pStart, pEnd, p, nroots, nleaves, l; 2910 PetscMPIInt size, rank; 2911 PetscErrorCode ierr; 2912 2913 PetscFunctionBegin; 2914 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 2915 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2916 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 2917 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 2918 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 2919 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 2920 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 2921 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 2922 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 2923 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 2924 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 2925 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 2926 PetscInt gdof, gcdof; 2927 2928 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2929 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2930 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 2931 } 2932 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 2933 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 2934 for (p = pStart, l = 0; p < pEnd; ++p) { 2935 const PetscInt *cind; 2936 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 2937 2938 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 2939 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 2940 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 2941 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 2942 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 2943 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 2944 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 2945 if (!gdof) continue; /* Censored point */ 2946 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 2947 if (gsize != dof-cdof) { 2948 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 2949 cdof = 0; /* Ignore constraints */ 2950 } 2951 for (d = 0, c = 0; d < dof; ++d) { 2952 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 2953 local[l+d-c] = off+d; 2954 } 2955 if (gdof < 0) { 2956 for (d = 0; d < gsize; ++d, ++l) { 2957 PetscInt offset = -(goff+1) + d, r; 2958 2959 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 2960 if (r < 0) r = -(r+2); 2961 remote[l].rank = r; 2962 remote[l].index = offset - ranges[r]; 2963 } 2964 } else { 2965 for (d = 0; d < gsize; ++d, ++l) { 2966 remote[l].rank = rank; 2967 remote[l].index = goff+d - ranges[rank]; 2968 } 2969 } 2970 } 2971 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 2972 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 2973 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 2974 PetscFunctionReturn(0); 2975 } 2976 2977 #undef __FUNCT__ 2978 #define __FUNCT__ "DMGetPointSF" 2979 /*@ 2980 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 2981 2982 Input Parameter: 2983 . dm - The DM 2984 2985 Output Parameter: 2986 . sf - The PetscSF 2987 2988 Level: intermediate 2989 2990 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 2991 2992 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 2993 @*/ 2994 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 2995 { 2996 PetscFunctionBegin; 2997 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2998 PetscValidPointer(sf, 2); 2999 *sf = dm->sf; 3000 PetscFunctionReturn(0); 3001 } 3002 3003 #undef __FUNCT__ 3004 #define __FUNCT__ "DMSetPointSF" 3005 /*@ 3006 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3007 3008 Input Parameters: 3009 + dm - The DM 3010 - sf - The PetscSF 3011 3012 Level: intermediate 3013 3014 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3015 @*/ 3016 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3017 { 3018 PetscErrorCode ierr; 3019 3020 PetscFunctionBegin; 3021 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3022 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3023 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3024 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3025 dm->sf = sf; 3026 PetscFunctionReturn(0); 3027 } 3028 3029 #undef __FUNCT__ 3030 #define __FUNCT__ "DMGetNumFields" 3031 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3032 { 3033 PetscFunctionBegin; 3034 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3035 PetscValidPointer(numFields, 2); 3036 *numFields = dm->numFields; 3037 PetscFunctionReturn(0); 3038 } 3039 3040 #undef __FUNCT__ 3041 #define __FUNCT__ "DMSetNumFields" 3042 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3043 { 3044 PetscInt f; 3045 PetscErrorCode ierr; 3046 3047 PetscFunctionBegin; 3048 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3049 for (f = 0; f < dm->numFields; ++f) { 3050 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3051 } 3052 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3053 dm->numFields = numFields; 3054 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3055 for (f = 0; f < dm->numFields; ++f) { 3056 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3057 } 3058 PetscFunctionReturn(0); 3059 } 3060 3061 #undef __FUNCT__ 3062 #define __FUNCT__ "DMGetField" 3063 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3064 { 3065 PetscFunctionBegin; 3066 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3067 PetscValidPointer(field, 2); 3068 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3069 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3070 *field = dm->fields[f]; 3071 PetscFunctionReturn(0); 3072 } 3073 3074 #undef __FUNCT__ 3075 #define __FUNCT__ "DMSetCoordinates" 3076 /*@ 3077 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3078 3079 Collective on DM 3080 3081 Input Parameters: 3082 + dm - the DM 3083 - c - coordinate vector 3084 3085 Note: 3086 The coordinates do include those for ghost points, which are in the local vector 3087 3088 Level: intermediate 3089 3090 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3091 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3092 @*/ 3093 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3094 { 3095 PetscErrorCode ierr; 3096 3097 PetscFunctionBegin; 3098 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3099 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3100 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3101 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3102 dm->coordinates = c; 3103 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3104 PetscFunctionReturn(0); 3105 } 3106 3107 #undef __FUNCT__ 3108 #define __FUNCT__ "DMSetCoordinatesLocal" 3109 /*@ 3110 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3111 3112 Collective on DM 3113 3114 Input Parameters: 3115 + dm - the DM 3116 - c - coordinate vector 3117 3118 Note: 3119 The coordinates of ghost points can be set using DMSetCoordinates() 3120 followed by DMGetCoordinatesLocal(). This is intended to enable the 3121 setting of ghost coordinates outside of the domain. 
3122 3123 Level: intermediate 3124 3125 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3126 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3127 @*/ 3128 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3129 { 3130 PetscErrorCode ierr; 3131 3132 PetscFunctionBegin; 3133 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3134 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3135 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3136 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3137 3138 dm->coordinatesLocal = c; 3139 3140 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3141 PetscFunctionReturn(0); 3142 } 3143 3144 #undef __FUNCT__ 3145 #define __FUNCT__ "DMGetCoordinates" 3146 /*@ 3147 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3148 3149 Not Collective 3150 3151 Input Parameter: 3152 . dm - the DM 3153 3154 Output Parameter: 3155 . c - global coordinate vector 3156 3157 Note: 3158 This is a borrowed reference, so the user should NOT destroy this vector 3159 3160 Each process has only the local coordinates (does NOT have the ghost coordinates). 3161 3162 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3163 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3164 3165 Level: intermediate 3166 3167 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3168 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3169 @*/ 3170 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3171 { 3172 PetscErrorCode ierr; 3173 3174 PetscFunctionBegin; 3175 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3176 PetscValidPointer(c,2); 3177 if (!dm->coordinates && dm->coordinatesLocal) { 3178 DM cdm = NULL; 3179 3180 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3181 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3182 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3183 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3184 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3185 } 3186 *c = dm->coordinates; 3187 PetscFunctionReturn(0); 3188 } 3189 3190 #undef __FUNCT__ 3191 #define __FUNCT__ "DMGetCoordinatesLocal" 3192 /*@ 3193 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3194 3195 Collective on DM 3196 3197 Input Parameter: 3198 . dm - the DM 3199 3200 Output Parameter: 3201 . c - coordinate vector 3202 3203 Note: 3204 This is a borrowed reference, so the user should NOT destroy this vector 3205 3206 Each process has the local and ghost coordinates 3207 3208 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3209 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3210 3211 Level: intermediate 3212 3213 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3214 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3215 @*/ 3216 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3217 { 3218 PetscErrorCode ierr; 3219 3220 PetscFunctionBegin; 3221 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3222 PetscValidPointer(c,2); 3223 if (!dm->coordinatesLocal && dm->coordinates) { 3224 DM cdm = NULL; 3225 3226 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3227 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3228 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3229 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3230 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3231 } 3232 *c = dm->coordinatesLocal; 3233 PetscFunctionReturn(0); 3234 } 3235 3236 #undef __FUNCT__ 3237 #define __FUNCT__ "DMGetCoordinateDM" 3238 /*@ 3239 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3240 3241 Collective on DM 3242 3243 Input Parameter: 3244 . dm - the DM 3245 3246 Output Parameter: 3247 . 
cdm - coordinate DM 3248 3249 Level: intermediate 3250 3251 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3252 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3253 @*/ 3254 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3255 { 3256 PetscErrorCode ierr; 3257 3258 PetscFunctionBegin; 3259 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3260 PetscValidPointer(cdm,2); 3261 if (!dm->coordinateDM) { 3262 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3263 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3264 } 3265 *cdm = dm->coordinateDM; 3266 PetscFunctionReturn(0); 3267 } 3268 3269 #undef __FUNCT__ 3270 #define __FUNCT__ "DMLocatePoints" 3271 /*@ 3272 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3273 3274 Not collective 3275 3276 Input Parameters: 3277 + dm - The DM 3278 - v - The Vec of points 3279 3280 Output Parameter: 3281 . cells - The local cell numbers for cells which contain the points 3282 3283 Level: developer 3284 3285 .keywords: point location, mesh 3286 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3287 @*/ 3288 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3289 { 3290 PetscErrorCode ierr; 3291 3292 PetscFunctionBegin; 3293 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3294 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3295 PetscValidPointer(cells,3); 3296 if (dm->ops->locatepoints) { 3297 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3298 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3299 PetscFunctionReturn(0); 3300 } 3301