1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed. 11 12 Collective on Vec 13 14 Input Parameters: 15 + dm - the DM 16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm' 17 - optionname - option to activate viewing 18 19 Level: intermediate 20 21 .keywords: DM, view, options, database 22 .seealso: VecViewFromOptions(), MatViewFromOptions() 23 */ 24 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 25 { 26 PetscErrorCode ierr; 27 PetscBool flg; 28 PetscViewer viewer; 29 PetscViewerFormat format; 30 31 PetscFunctionBegin; 32 if (prefix) { 33 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 34 } else { 35 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 36 } 37 if (flg) { 38 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 39 ierr = DMView(dm,viewer);CHKERRQ(ierr); 40 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 41 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 42 } 43 PetscFunctionReturn(0); 44 } 45 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMCreate" 49 /*@ 50 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 51 52 If you never call DMSetType() it will generate an 53 error when you try to use the vector. 54 55 Collective on MPI_Comm 56 57 Input Parameter: 58 . comm - The communicator for the DM object 59 60 Output Parameter: 61 . 
dm - The DM object 62 63 Level: beginner 64 65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 66 @*/ 67 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 68 { 69 DM v; 70 PetscErrorCode ierr; 71 72 PetscFunctionBegin; 73 PetscValidPointer(dm,2); 74 *dm = NULL; 75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 76 ierr = VecInitializePackage();CHKERRQ(ierr); 77 ierr = MatInitializePackage();CHKERRQ(ierr); 78 ierr = DMInitializePackage();CHKERRQ(ierr); 79 #endif 80 81 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 82 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 83 84 85 v->ltogmap = NULL; 86 v->ltogmapb = NULL; 87 v->bs = 1; 88 v->coloringtype = IS_COLORING_GLOBAL; 89 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 90 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 91 v->defaultSection = NULL; 92 v->defaultGlobalSection = NULL; 93 { 94 PetscInt i; 95 for (i = 0; i < 10; ++i) { 96 v->nullspaceConstructors[i] = NULL; 97 } 98 } 99 v->numFields = 0; 100 v->fields = NULL; 101 102 *dm = v; 103 PetscFunctionReturn(0); 104 } 105 106 #undef __FUNCT__ 107 #define __FUNCT__ "DMClone" 108 /*@ 109 DMClone - Creates a DM object with the same topology as the original. 110 111 Collective on MPI_Comm 112 113 Input Parameter: 114 . dm - The original DM object 115 116 Output Parameter: 117 . 
newdm - The new DM object 118 119 Level: beginner 120 121 .keywords: DM, topology, create 122 @*/ 123 PetscErrorCode DMClone(DM dm, DM *newdm) 124 { 125 PetscSF sf; 126 Vec coords; 127 void *ctx; 128 PetscErrorCode ierr; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 132 PetscValidPointer(newdm,2); 133 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 134 if (dm->ops->clone) { 135 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 136 } 137 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 138 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 139 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 140 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 141 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 142 if (coords) { 143 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 144 } else { 145 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 146 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 147 } 148 PetscFunctionReturn(0); 149 } 150 151 #undef __FUNCT__ 152 #define __FUNCT__ "DMSetVecType" 153 /*@C 154 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 155 156 Logically Collective on DMDA 157 158 Input Parameter: 159 + da - initial distributed array 160 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 161 162 Options Database: 163 . 
-dm_vec_type ctype 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 168 @*/ 169 PetscErrorCode DMSetVecType(DM da,VecType ctype) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(da,DM_CLASSID,1); 175 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 176 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 177 PetscFunctionReturn(0); 178 } 179 180 #undef __FUNCT__ 181 #define __FUNCT__ "VecGetDM" 182 /*@ 183 VecGetDM - Gets the DM defining the data layout of the vector 184 185 Not collective 186 187 Input Parameter: 188 . v - The Vec 189 190 Output Parameter: 191 . dm - The DM 192 193 Level: intermediate 194 195 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 196 @*/ 197 PetscErrorCode VecGetDM(Vec v, DM *dm) 198 { 199 PetscErrorCode ierr; 200 201 PetscFunctionBegin; 202 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 203 PetscValidPointer(dm,2); 204 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 205 PetscFunctionReturn(0); 206 } 207 208 #undef __FUNCT__ 209 #define __FUNCT__ "VecSetDM" 210 /*@ 211 VecSetDM - Sets the DM defining the data layout of the vector 212 213 Not collective 214 215 Input Parameters: 216 + v - The Vec 217 - dm - The DM 218 219 Level: intermediate 220 221 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 222 @*/ 223 PetscErrorCode VecSetDM(Vec v, DM dm) 224 { 225 PetscErrorCode ierr; 226 227 PetscFunctionBegin; 228 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 229 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 230 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 231 PetscFunctionReturn(0); 232 } 233 234 #undef __FUNCT__ 235 #define __FUNCT__ "DMSetMatType" 236 /*@C 237 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 238 239 Logically Collective on 
DM 240 241 Input Parameter: 242 + dm - the DM context 243 . ctype - the matrix type 244 245 Options Database: 246 . -dm_mat_type ctype 247 248 Level: intermediate 249 250 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 251 @*/ 252 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 253 { 254 PetscErrorCode ierr; 255 256 PetscFunctionBegin; 257 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 258 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 259 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 260 PetscFunctionReturn(0); 261 } 262 263 #undef __FUNCT__ 264 #define __FUNCT__ "MatGetDM" 265 /*@ 266 MatGetDM - Gets the DM defining the data layout of the matrix 267 268 Not collective 269 270 Input Parameter: 271 . A - The Mat 272 273 Output Parameter: 274 . dm - The DM 275 276 Level: intermediate 277 278 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 279 @*/ 280 PetscErrorCode MatGetDM(Mat A, DM *dm) 281 { 282 PetscErrorCode ierr; 283 284 PetscFunctionBegin; 285 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 286 PetscValidPointer(dm,2); 287 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 288 PetscFunctionReturn(0); 289 } 290 291 #undef __FUNCT__ 292 #define __FUNCT__ "MatSetDM" 293 /*@ 294 MatSetDM - Sets the DM defining the data layout of the matrix 295 296 Not collective 297 298 Input Parameters: 299 + A - The Mat 300 - dm - The DM 301 302 Level: intermediate 303 304 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 305 @*/ 306 PetscErrorCode MatSetDM(Mat A, DM dm) 307 { 308 PetscErrorCode ierr; 309 310 PetscFunctionBegin; 311 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 312 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 313 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 314 PetscFunctionReturn(0); 315 } 316 317 #undef __FUNCT__ 318 #define __FUNCT__ "DMSetOptionsPrefix" 319 /*@C 320 DMSetOptionsPrefix - 
Sets the prefix used for searching for all 321 DMDA options in the database. 322 323 Logically Collective on DMDA 324 325 Input Parameter: 326 + da - the DMDA context 327 - prefix - the prefix to prepend to all option names 328 329 Notes: 330 A hyphen (-) must NOT be given at the beginning of the prefix name. 331 The first character of all runtime options is AUTOMATICALLY the hyphen. 332 333 Level: advanced 334 335 .keywords: DMDA, set, options, prefix, database 336 337 .seealso: DMSetFromOptions() 338 @*/ 339 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 340 { 341 PetscErrorCode ierr; 342 343 PetscFunctionBegin; 344 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 345 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 346 PetscFunctionReturn(0); 347 } 348 349 #undef __FUNCT__ 350 #define __FUNCT__ "DMDestroy" 351 /*@ 352 DMDestroy - Destroys a vector packer or DMDA. 353 354 Collective on DM 355 356 Input Parameter: 357 . dm - the DM object to destroy 358 359 Level: developer 360 361 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 362 363 @*/ 364 PetscErrorCode DMDestroy(DM *dm) 365 { 366 PetscInt i, cnt = 0, f; 367 DMNamedVecLink nlink,nnext; 368 PetscErrorCode ierr; 369 370 PetscFunctionBegin; 371 if (!*dm) PetscFunctionReturn(0); 372 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 373 374 /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */ 375 for (f = 0; f < (*dm)->numFields; ++f) { 376 ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr); 377 ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr); 378 ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr); 379 } 380 /* count all the circular references of DM and its contained Vecs */ 381 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 382 if ((*dm)->localin[i]) cnt++; 383 if ((*dm)->globalin[i]) 
cnt++; 384 } 385 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 386 if ((*dm)->x) { 387 DM obj; 388 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 389 if (obj == *dm) cnt++; 390 } 391 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 392 if ((*dm)->x) { 393 DM obj; 394 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 395 if (obj == *dm) cnt++; 396 } 397 398 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 399 /* 400 Need this test because the dm references the vectors that 401 reference the dm, so destroying the dm calls destroy on the 402 vectors that cause another destroy on the dm 403 */ 404 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 405 ((PetscObject) (*dm))->refct = 0; 406 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 407 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 408 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 409 } 410 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 411 nnext = nlink->next; 412 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 413 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 414 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 415 ierr = PetscFree(nlink);CHKERRQ(ierr); 416 } 417 (*dm)->namedglobal = NULL; 418 419 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 420 nnext = nlink->next; 421 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 422 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 423 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 424 ierr = PetscFree(nlink);CHKERRQ(ierr); 425 } 426 (*dm)->namedlocal = NULL; 427 428 /* Destroy the list of hooks */ 429 { 430 DMCoarsenHookLink 
link,next; 431 for (link=(*dm)->coarsenhook; link; link=next) { 432 next = link->next; 433 ierr = PetscFree(link);CHKERRQ(ierr); 434 } 435 (*dm)->coarsenhook = NULL; 436 } 437 { 438 DMRefineHookLink link,next; 439 for (link=(*dm)->refinehook; link; link=next) { 440 next = link->next; 441 ierr = PetscFree(link);CHKERRQ(ierr); 442 } 443 (*dm)->refinehook = NULL; 444 } 445 { 446 DMSubDomainHookLink link,next; 447 for (link=(*dm)->subdomainhook; link; link=next) { 448 next = link->next; 449 ierr = PetscFree(link);CHKERRQ(ierr); 450 } 451 (*dm)->subdomainhook = NULL; 452 } 453 { 454 DMGlobalToLocalHookLink link,next; 455 for (link=(*dm)->gtolhook; link; link=next) { 456 next = link->next; 457 ierr = PetscFree(link);CHKERRQ(ierr); 458 } 459 (*dm)->gtolhook = NULL; 460 } 461 /* Destroy the work arrays */ 462 { 463 DMWorkLink link,next; 464 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 465 for (link=(*dm)->workin; link; link=next) { 466 next = link->next; 467 ierr = PetscFree(link->mem);CHKERRQ(ierr); 468 ierr = PetscFree(link);CHKERRQ(ierr); 469 } 470 (*dm)->workin = NULL; 471 } 472 473 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 474 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 475 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 476 477 if ((*dm)->ctx && (*dm)->ctxdestroy) { 478 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 479 } 480 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 481 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 482 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 483 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 484 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 485 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 486 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 487 488 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 489 ierr = 
PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 490 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 491 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 492 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 493 494 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 495 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 496 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 497 498 for (f = 0; f < (*dm)->numFields; ++f) { 499 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 500 } 501 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 502 /* if memory was published with AMS then destroy it */ 503 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 504 505 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 506 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 507 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 508 PetscFunctionReturn(0); 509 } 510 511 #undef __FUNCT__ 512 #define __FUNCT__ "DMSetUp" 513 /*@ 514 DMSetUp - sets up the data structures inside a DM object 515 516 Collective on DM 517 518 Input Parameter: 519 . dm - the DM object to setup 520 521 Level: developer 522 523 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 524 525 @*/ 526 PetscErrorCode DMSetUp(DM dm) 527 { 528 PetscErrorCode ierr; 529 530 PetscFunctionBegin; 531 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 532 if (dm->setupcalled) PetscFunctionReturn(0); 533 if (dm->ops->setup) { 534 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 535 } 536 dm->setupcalled = PETSC_TRUE; 537 PetscFunctionReturn(0); 538 } 539 540 #undef __FUNCT__ 541 #define __FUNCT__ "DMSetFromOptions" 542 /*@ 543 DMSetFromOptions - sets parameters in a DM from the options database 544 545 Collective on DM 546 547 Input Parameter: 548 . 
dm - the DM object to set options for 549 550 Options Database: 551 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 552 . -dm_vec_type <type> type of vector to create inside DM 553 . -dm_mat_type <type> type of matrix to create inside DM 554 - -dm_coloring_type <global or ghosted> 555 556 Level: developer 557 558 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 559 560 @*/ 561 PetscErrorCode DMSetFromOptions(DM dm) 562 { 563 char typeName[256] = MATAIJ; 564 PetscBool flg; 565 PetscErrorCode ierr; 566 567 PetscFunctionBegin; 568 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 569 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 570 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 571 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 572 if (flg) { 573 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 574 } 575 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? 
dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 576 if (flg) { 577 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 578 } 579 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 580 if (dm->ops->setfromoptions) { 581 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 582 } 583 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 584 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 585 ierr = PetscOptionsEnd();CHKERRQ(ierr); 586 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 587 PetscFunctionReturn(0); 588 } 589 590 #undef __FUNCT__ 591 #define __FUNCT__ "DMView" 592 /*@C 593 DMView - Views a vector packer or DMDA. 594 595 Collective on DM 596 597 Input Parameter: 598 + dm - the DM object to view 599 - v - the viewer 600 601 Level: developer 602 603 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 604 605 @*/ 606 PetscErrorCode DMView(DM dm,PetscViewer v) 607 { 608 PetscErrorCode ierr; 609 PetscBool isbinary; 610 611 PetscFunctionBegin; 612 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 613 if (!v) { 614 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 615 } 616 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 617 if (isbinary) { 618 PetscInt classid = DM_FILE_CLASSID; 619 char type[256]; 620 621 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 622 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 623 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 624 } 625 if (dm->ops->view) { 626 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 627 } 628 PetscFunctionReturn(0); 629 } 630 631 #undef __FUNCT__ 632 #define __FUNCT__ 
"DMCreateGlobalVector" 633 /*@ 634 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 635 636 Collective on DM 637 638 Input Parameter: 639 . dm - the DM object 640 641 Output Parameter: 642 . vec - the global vector 643 644 Level: beginner 645 646 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 647 648 @*/ 649 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 650 { 651 PetscErrorCode ierr; 652 653 PetscFunctionBegin; 654 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 655 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 656 PetscFunctionReturn(0); 657 } 658 659 #undef __FUNCT__ 660 #define __FUNCT__ "DMCreateLocalVector" 661 /*@ 662 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 663 664 Not Collective 665 666 Input Parameter: 667 . dm - the DM object 668 669 Output Parameter: 670 . vec - the local vector 671 672 Level: beginner 673 674 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 675 676 @*/ 677 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 678 { 679 PetscErrorCode ierr; 680 681 PetscFunctionBegin; 682 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 683 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 684 PetscFunctionReturn(0); 685 } 686 687 #undef __FUNCT__ 688 #define __FUNCT__ "DMGetLocalToGlobalMapping" 689 /*@ 690 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 691 692 Collective on DM 693 694 Input Parameter: 695 . dm - the DM that provides the mapping 696 697 Output Parameter: 698 . ltog - the mapping 699 700 Level: intermediate 701 702 Notes: 703 This mapping can then be used by VecSetLocalToGlobalMapping() or 704 MatSetLocalToGlobalMapping(). 
705 706 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 707 @*/ 708 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 709 { 710 PetscErrorCode ierr; 711 712 PetscFunctionBegin; 713 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 714 PetscValidPointer(ltog,2); 715 if (!dm->ltogmap) { 716 PetscSection section, sectionGlobal; 717 718 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 719 if (section) { 720 PetscInt *ltog; 721 PetscInt pStart, pEnd, size, p, l; 722 723 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 724 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 725 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 726 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 727 for (p = pStart, l = 0; p < pEnd; ++p) { 728 PetscInt dof, off, c; 729 730 /* Should probably use constrained dofs */ 731 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 732 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 733 for (c = 0; c < dof; ++c, ++l) { 734 ltog[l] = off+c; 735 } 736 } 737 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 738 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 739 } else { 740 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 741 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 742 } 743 } 744 *ltog = dm->ltogmap; 745 PetscFunctionReturn(0); 746 } 747 748 #undef __FUNCT__ 749 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 750 /*@ 751 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 752 753 Collective on DM 754 755 Input Parameter: 756 . da - the distributed array that provides the mapping 757 758 Output Parameter: 759 . 
ltog - the block mapping 760 761 Level: intermediate 762 763 Notes: 764 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 765 MatSetLocalToGlobalMappingBlock(). 766 767 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 768 @*/ 769 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 770 { 771 PetscErrorCode ierr; 772 773 PetscFunctionBegin; 774 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 775 PetscValidPointer(ltog,2); 776 if (!dm->ltogmapb) { 777 PetscInt bs; 778 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 779 if (bs > 1) { 780 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 781 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 782 } else { 783 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 784 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 785 } 786 } 787 *ltog = dm->ltogmapb; 788 PetscFunctionReturn(0); 789 } 790 791 #undef __FUNCT__ 792 #define __FUNCT__ "DMGetBlockSize" 793 /*@ 794 DMGetBlockSize - Gets the inherent block size associated with a DM 795 796 Not Collective 797 798 Input Parameter: 799 . dm - the DM with block structure 800 801 Output Parameter: 802 . 
bs - the block size, 1 implies no exploitable block structure 803 804 Level: intermediate 805 806 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 807 @*/ 808 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 809 { 810 PetscFunctionBegin; 811 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 812 PetscValidPointer(bs,2); 813 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 814 *bs = dm->bs; 815 PetscFunctionReturn(0); 816 } 817 818 #undef __FUNCT__ 819 #define __FUNCT__ "DMCreateInterpolation" 820 /*@ 821 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 822 823 Collective on DM 824 825 Input Parameter: 826 + dm1 - the DM object 827 - dm2 - the second, finer DM object 828 829 Output Parameter: 830 + mat - the interpolation 831 - vec - the scaling (optional) 832 833 Level: developer 834 835 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 836 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 837 838 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 839 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
840 841 842 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 843 844 @*/ 845 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 846 { 847 PetscErrorCode ierr; 848 849 PetscFunctionBegin; 850 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 851 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 852 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 853 PetscFunctionReturn(0); 854 } 855 856 #undef __FUNCT__ 857 #define __FUNCT__ "DMCreateInjection" 858 /*@ 859 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 860 861 Collective on DM 862 863 Input Parameter: 864 + dm1 - the DM object 865 - dm2 - the second, finer DM object 866 867 Output Parameter: 868 . ctx - the injection 869 870 Level: developer 871 872 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 873 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 874 875 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 876 877 @*/ 878 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 879 { 880 PetscErrorCode ierr; 881 882 PetscFunctionBegin; 883 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 884 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 885 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 886 PetscFunctionReturn(0); 887 } 888 889 #undef __FUNCT__ 890 #define __FUNCT__ "DMCreateColoring" 891 /*@C 892 DMCreateColoring - Gets coloring for a DMDA or DMComposite 893 894 Collective on DM 895 896 Input Parameter: 897 + dm - the DM object 898 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 899 - matype - either MATAIJ or MATBAIJ 900 901 Output Parameter: 902 . 
coloring - the coloring 903 904 Level: developer 905 906 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 907 908 @*/ 909 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 910 { 911 PetscErrorCode ierr; 912 913 PetscFunctionBegin; 914 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 915 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 916 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 917 PetscFunctionReturn(0); 918 } 919 920 #undef __FUNCT__ 921 #define __FUNCT__ "DMCreateMatrix" 922 /*@C 923 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 924 925 Collective on DM 926 927 Input Parameter: 928 + dm - the DM object 929 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 930 any type which inherits from one of these (such as MATAIJ) 931 932 Output Parameter: 933 . mat - the empty Jacobian 934 935 Level: beginner 936 937 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 938 do not need to do it yourself. 939 940 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 941 the nonzero pattern call DMDASetMatPreallocateOnly() 942 943 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 944 internally by PETSc. 945 946 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 947 the indices for the global numbering for DMDAs which is complicated. 
948 949 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 950 951 @*/ 952 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 953 { 954 PetscErrorCode ierr; 955 956 PetscFunctionBegin; 957 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 958 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 959 ierr = MatInitializePackage();CHKERRQ(ierr); 960 #endif 961 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 962 PetscValidPointer(mat,3); 963 if (dm->mattype) { 964 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 965 } else { 966 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 967 } 968 PetscFunctionReturn(0); 969 } 970 971 #undef __FUNCT__ 972 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 973 /*@ 974 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 975 preallocated but the nonzero structure and zero values will not be set. 976 977 Logically Collective on DMDA 978 979 Input Parameter: 980 + dm - the DM 981 - only - PETSC_TRUE if only want preallocation 982 983 Level: developer 984 .seealso DMCreateMatrix() 985 @*/ 986 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 987 { 988 PetscFunctionBegin; 989 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 990 dm->prealloc_only = only; 991 PetscFunctionReturn(0); 992 } 993 994 #undef __FUNCT__ 995 #define __FUNCT__ "DMGetWorkArray" 996 /*@C 997 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 998 999 Not Collective 1000 1001 Input Parameters: 1002 + dm - the DM object 1003 . count - The minium size 1004 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1005 1006 Output Parameter: 1007 . 
array - the work array 1008 1009 Level: developer 1010 1011 .seealso DMDestroy(), DMCreate() 1012 @*/ 1013 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1014 { 1015 PetscErrorCode ierr; 1016 DMWorkLink link; 1017 size_t size; 1018 1019 PetscFunctionBegin; 1020 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1021 PetscValidPointer(mem,4); 1022 if (dm->workin) { 1023 link = dm->workin; 1024 dm->workin = dm->workin->next; 1025 } else { 1026 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 1027 } 1028 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1029 if (size*count > link->bytes) { 1030 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1031 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1032 link->bytes = size*count; 1033 } 1034 link->next = dm->workout; 1035 dm->workout = link; 1036 *(void**)mem = link->mem; 1037 PetscFunctionReturn(0); 1038 } 1039 1040 #undef __FUNCT__ 1041 #define __FUNCT__ "DMRestoreWorkArray" 1042 /*@C 1043 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1044 1045 Not Collective 1046 1047 Input Parameters: 1048 + dm - the DM object 1049 . count - The minium size 1050 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1051 1052 Output Parameter: 1053 . 
array - the work array 1054 1055 Level: developer 1056 1057 .seealso DMDestroy(), DMCreate() 1058 @*/ 1059 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1060 { 1061 DMWorkLink *p,link; 1062 1063 PetscFunctionBegin; 1064 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1065 PetscValidPointer(mem,4); 1066 for (p=&dm->workout; (link=*p); p=&link->next) { 1067 if (link->mem == *(void**)mem) { 1068 *p = link->next; 1069 link->next = dm->workin; 1070 dm->workin = link; 1071 *(void**)mem = NULL; 1072 PetscFunctionReturn(0); 1073 } 1074 } 1075 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1076 PetscFunctionReturn(0); 1077 } 1078 1079 #undef __FUNCT__ 1080 #define __FUNCT__ "DMSetNullSpaceConstructor" 1081 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1082 { 1083 PetscFunctionBegin; 1084 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1085 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1086 dm->nullspaceConstructors[field] = nullsp; 1087 PetscFunctionReturn(0); 1088 } 1089 1090 #undef __FUNCT__ 1091 #define __FUNCT__ "DMCreateFieldIS" 1092 /*@C 1093 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1094 1095 Not collective 1096 1097 Input Parameter: 1098 . dm - the DM object 1099 1100 Output Parameters: 1101 + numFields - The number of fields (or NULL if not requested) 1102 . fieldNames - The name for each field (or NULL if not requested) 1103 - fields - The global indices for each field (or NULL if not requested) 1104 1105 Level: intermediate 1106 1107 Notes: 1108 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1109 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1110 PetscFree(). 1111 1112 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1113 @*/ 1114 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1115 { 1116 PetscSection section, sectionGlobal; 1117 PetscErrorCode ierr; 1118 1119 PetscFunctionBegin; 1120 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1121 if (numFields) { 1122 PetscValidPointer(numFields,2); 1123 *numFields = 0; 1124 } 1125 if (fieldNames) { 1126 PetscValidPointer(fieldNames,3); 1127 *fieldNames = NULL; 1128 } 1129 if (fields) { 1130 PetscValidPointer(fields,4); 1131 *fields = NULL; 1132 } 1133 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1134 if (section) { 1135 PetscInt *fieldSizes, **fieldIndices; 1136 PetscInt nF, f, pStart, pEnd, p; 1137 1138 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1139 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1140 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1141 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1142 for (f = 0; f < nF; ++f) { 1143 fieldSizes[f] = 0; 1144 } 1145 for (p = pStart; p < pEnd; ++p) { 1146 PetscInt gdof; 1147 1148 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1149 if (gdof > 0) { 1150 for (f = 0; f < nF; ++f) { 1151 PetscInt fdof, fcdof; 1152 1153 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1154 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1155 fieldSizes[f] += fdof-fcdof; 1156 } 1157 } 1158 } 1159 for (f = 0; f < nF; ++f) { 1160 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1161 fieldSizes[f] = 0; 1162 } 1163 for (p = pStart; p < pEnd; ++p) { 1164 PetscInt gdof, goff; 
1165 1166 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1167 if (gdof > 0) { 1168 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1169 for (f = 0; f < nF; ++f) { 1170 PetscInt fdof, fcdof, fc; 1171 1172 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1173 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1174 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1175 fieldIndices[f][fieldSizes[f]] = goff++; 1176 } 1177 } 1178 } 1179 } 1180 if (numFields) *numFields = nF; 1181 if (fieldNames) { 1182 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1183 for (f = 0; f < nF; ++f) { 1184 const char *fieldName; 1185 1186 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1187 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1188 } 1189 } 1190 if (fields) { 1191 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1192 for (f = 0; f < nF; ++f) { 1193 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1194 } 1195 } 1196 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1197 } else if (dm->ops->createfieldis) { 1198 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1199 } 1200 PetscFunctionReturn(0); 1201 } 1202 1203 1204 #undef __FUNCT__ 1205 #define __FUNCT__ "DMCreateFieldDecomposition" 1206 /*@C 1207 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1208 corresponding to different fields: each IS contains the global indices of the dofs of the 1209 corresponding field. The optional list of DMs define the DM for each subproblem. 1210 Generalizes DMCreateFieldIS(). 1211 1212 Not collective 1213 1214 Input Parameter: 1215 . 
dm - the DM object 1216 1217 Output Parameters: 1218 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1219 . namelist - The name for each field (or NULL if not requested) 1220 . islist - The global indices for each field (or NULL if not requested) 1221 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1222 1223 Level: intermediate 1224 1225 Notes: 1226 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1227 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1228 and all of the arrays should be freed with PetscFree(). 1229 1230 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1231 @*/ 1232 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1233 { 1234 PetscErrorCode ierr; 1235 1236 PetscFunctionBegin; 1237 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1238 if (len) { 1239 PetscValidPointer(len,2); 1240 *len = 0; 1241 } 1242 if (namelist) { 1243 PetscValidPointer(namelist,3); 1244 *namelist = 0; 1245 } 1246 if (islist) { 1247 PetscValidPointer(islist,4); 1248 *islist = 0; 1249 } 1250 if (dmlist) { 1251 PetscValidPointer(dmlist,5); 1252 *dmlist = 0; 1253 } 1254 /* 1255 Is it a good idea to apply the following check across all impls? 1256 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1257 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1258 */ 1259 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1260 if (!dm->ops->createfielddecomposition) { 1261 PetscSection section; 1262 PetscInt numFields, f; 1263 1264 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1265 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1266 if (section && numFields && dm->ops->createsubdm) { 1267 *len = numFields; 1268 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1269 for (f = 0; f < numFields; ++f) { 1270 const char *fieldName; 1271 1272 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1273 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1274 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1275 } 1276 } else { 1277 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1278 /* By default there are no DMs associated with subproblems. */ 1279 if (dmlist) *dmlist = NULL; 1280 } 1281 } else { 1282 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1283 } 1284 PetscFunctionReturn(0); 1285 } 1286 1287 #undef __FUNCT__ 1288 #define __FUNCT__ "DMCreateSubDM" 1289 /*@C 1290 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1291 The fields are defined by DMCreateFieldIS(). 1292 1293 Not collective 1294 1295 Input Parameters: 1296 + dm - the DM object 1297 . numFields - number of fields in this subproblem 1298 - len - The number of subproblems in the decomposition (or NULL if not requested) 1299 1300 Output Parameters: 1301 . 
is - The global indices for the subproblem 1302 - dm - The DM for the subproblem 1303 1304 Level: intermediate 1305 1306 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1307 @*/ 1308 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1309 { 1310 PetscErrorCode ierr; 1311 1312 PetscFunctionBegin; 1313 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1314 PetscValidPointer(fields,3); 1315 if (is) PetscValidPointer(is,4); 1316 if (subdm) PetscValidPointer(subdm,5); 1317 if (dm->ops->createsubdm) { 1318 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1319 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1320 PetscFunctionReturn(0); 1321 } 1322 1323 1324 #undef __FUNCT__ 1325 #define __FUNCT__ "DMCreateDomainDecomposition" 1326 /*@C 1327 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1328 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1329 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1330 define a nonoverlapping covering, while outer subdomains can overlap. 1331 The optional list of DMs define the DM for each subproblem. 1332 1333 Not collective 1334 1335 Input Parameter: 1336 . dm - the DM object 1337 1338 Output Parameters: 1339 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1340 . namelist - The name for each subdomain (or NULL if not requested) 1341 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1342 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the ones that were requested */
  if (len)         {PetscValidPointer(len,2);         *len         = 0;}
  if (namelist)    {PetscValidPointer(namelist,3);    *namelist    = NULL;}
  if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;}
  if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;}
  if (dmlist)      {PetscValidPointer(dmlist,6);      *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionScatters"
/*@C
  DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector

  Not collective

  Input Parameters:
+ dm     - the DM object
. n      - the number of subdomain scatters
- subdms - the local subdomains

  Output Parameters:
+ n     - the number of scatters returned
. iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
. oscat - scatter from global vector to overlapping global vector entries on subdomain
- gscat - scatter from global vector to local vector on subdomain (fills in ghosts)

  Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
  of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
  of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
  solution and residual data.

  Level: developer

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(subdms,3);
  /* Delegate to the implementation; there is no generic fallback */
  if (dm->ops->createddscatters) {
    ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine"
/*@
    DMRefine - Refines a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.
dmf - the refined DM, or NULL 1443 1444 Note: If no refinement was done, the return value is NULL 1445 1446 Level: developer 1447 1448 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1449 @*/ 1450 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1451 { 1452 PetscErrorCode ierr; 1453 DMRefineHookLink link; 1454 1455 PetscFunctionBegin; 1456 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1457 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1458 if (*dmf) { 1459 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1460 1461 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1462 1463 (*dmf)->ctx = dm->ctx; 1464 (*dmf)->leveldown = dm->leveldown; 1465 (*dmf)->levelup = dm->levelup + 1; 1466 1467 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1468 for (link=dm->refinehook; link; link=link->next) { 1469 if (link->refinehook) { 1470 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1471 } 1472 } 1473 } 1474 PetscFunctionReturn(0); 1475 } 1476 1477 #undef __FUNCT__ 1478 #define __FUNCT__ "DMRefineHookAdd" 1479 /*@C 1480 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1481 1482 Logically Collective 1483 1484 Input Arguments: 1485 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1486 . refinehook - function to run when setting up a coarser level 1487 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1488 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1489 1490 Calling sequence of refinehook: 1491 $ refinehook(DM coarse,DM fine,void *ctx); 1492 1493 + coarse - coarse level DM 1494 . 
fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

   This function is currently not available from Fortran.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx        = ctx;
  link->next       = NULL;
  *p               = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
   DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

   Collective if any hooks are

   Input Arguments:
+  coarse - coarser DM to use as a base
.
restrct - interpolation matrix, apply using MatInterpolate() 1543 - fine - finer DM to update 1544 1545 Level: developer 1546 1547 .seealso: DMRefineHookAdd(), MatInterpolate() 1548 @*/ 1549 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1550 { 1551 PetscErrorCode ierr; 1552 DMRefineHookLink link; 1553 1554 PetscFunctionBegin; 1555 for (link=fine->refinehook; link; link=link->next) { 1556 if (link->interphook) { 1557 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1558 } 1559 } 1560 PetscFunctionReturn(0); 1561 } 1562 1563 #undef __FUNCT__ 1564 #define __FUNCT__ "DMGetRefineLevel" 1565 /*@ 1566 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1567 1568 Not Collective 1569 1570 Input Parameter: 1571 . dm - the DM object 1572 1573 Output Parameter: 1574 . level - number of refinements 1575 1576 Level: developer 1577 1578 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1579 1580 @*/ 1581 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1582 { 1583 PetscFunctionBegin; 1584 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1585 *level = dm->levelup; 1586 PetscFunctionReturn(0); 1587 } 1588 1589 #undef __FUNCT__ 1590 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1591 /*@C 1592 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1593 1594 Logically Collective 1595 1596 Input Arguments: 1597 + dm - the DM 1598 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1599 . endhook - function to run after DMGlobalToLocalEnd() has completed 1600 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1601 1602 Calling sequence for beginhook: 1603 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1604 1605 + dm - global DM 1606 . g - global vector 1607 . mode - mode 1608 . 
l - local vector 1609 - ctx - optional user-defined function context 1610 1611 1612 Calling sequence for endhook: 1613 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1614 1615 + global - global DM 1616 - ctx - optional user-defined function context 1617 1618 Level: advanced 1619 1620 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1621 @*/ 1622 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1623 { 1624 PetscErrorCode ierr; 1625 DMGlobalToLocalHookLink link,*p; 1626 1627 PetscFunctionBegin; 1628 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1629 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1630 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1631 link->beginhook = beginhook; 1632 link->endhook = endhook; 1633 link->ctx = ctx; 1634 link->next = NULL; 1635 *p = link; 1636 PetscFunctionReturn(0); 1637 } 1638 1639 #undef __FUNCT__ 1640 #define __FUNCT__ "DMGlobalToLocalBegin" 1641 /*@ 1642 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1643 1644 Neighbor-wise Collective on DM 1645 1646 Input Parameters: 1647 + dm - the DM object 1648 . g - the global vector 1649 . 
mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before starting communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The star-forest path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations only understand INSERT_VALUES/ADD_VALUES; fold the *_ALL_* modes down */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.
mode - INSERT_VALUES or ADD_VALUES 1698 - l - the local vector 1699 1700 1701 Level: beginner 1702 1703 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1704 1705 @*/ 1706 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1707 { 1708 PetscSF sf; 1709 PetscErrorCode ierr; 1710 PetscScalar *lArray, *gArray; 1711 DMGlobalToLocalHookLink link; 1712 1713 PetscFunctionBegin; 1714 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1715 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1716 if (sf) { 1717 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1718 1719 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1720 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1721 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1722 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1723 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1724 } else { 1725 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1726 } 1727 for (link=dm->gtolhook; link; link=link->next) { 1728 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1729 } 1730 PetscFunctionReturn(0); 1731 } 1732 1733 #undef __FUNCT__ 1734 #define __FUNCT__ "DMLocalToGlobalBegin" 1735 /*@ 1736 DMLocalToGlobalBegin - updates global vectors from local vectors 1737 1738 Neighbor-wise Collective on DM 1739 1740 Input Parameters: 1741 + dm - the DM object 1742 . l - the local vector 1743 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1744 base point. 1745 - - the global vector 1746 1747 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
    If you would like to simply add the non-ghosted values in the local
    array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
    global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insert mode onto the MPI reduction used by the star forest */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations only understand INSERT_VALUES/ADD_VALUES; fold the *_ALL_* modes down */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.
mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insert mode onto the MPI reduction used by the star forest */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations only understand INSERT_VALUES/ADD_VALUES; fold the *_ALL_* modes down */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToLocalBegin"
/*@
   DMLocalToLocalBegin - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be followed by DMLocalToLocalEnd().

   Neighbor-wise Collective on DM and Vec

   Input Parameters:
+  dm - the DM object
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.
l - the local vector with correct ghost values 1857 1858 Level: intermediate 1859 1860 Notes: 1861 The local vectors used here need not be the same as those 1862 obtained from DMCreateLocalVector(), BUT they 1863 must have the same parallel data layout; they could, for example, be 1864 obtained with VecDuplicate() from the DM originating vectors. 1865 1866 .keywords: DM, local-to-local, begin 1867 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1868 1869 @*/ 1870 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1871 { 1872 PetscErrorCode ierr; 1873 1874 PetscFunctionBegin; 1875 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1876 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1877 PetscFunctionReturn(0); 1878 } 1879 1880 #undef __FUNCT__ 1881 #define __FUNCT__ "DMLocalToLocalEnd" 1882 /*@ 1883 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1884 that contain irrelevant values) to another local vector where the ghost 1885 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1886 1887 Neighbor-wise Collective on DM and Vec 1888 1889 Input Parameters: 1890 + da - the DM object 1891 . g - the original local vector 1892 - mode - one of INSERT_VALUES or ADD_VALUES 1893 1894 Output Parameter: 1895 . l - the local vector with correct ghost values 1896 1897 Level: intermediate 1898 1899 Notes: 1900 The local vectors used here need not be the same as those 1901 obtained from DMCreateLocalVector(), BUT they 1902 must have the same parallel data layout; they could, for example, be 1903 obtained with VecDuplicate() from the DM originating vectors. 

.keywords: DM, local-to-local, end
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Implementations only understand INSERT_VALUES/ADD_VALUES; fold the *_ALL_* modes down */
  ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
    DMCoarsen - Coarsens a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.   dmc - the coarsened DM

    Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  /* The coarse grid inherits the matrix machinery, user context, and level bookkeeping */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx       = dm->ctx;
  (*dmc)->levelup   = dm->levelup;
  (*dmc)->leveldown = dm->leveldown + 1;
  ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run user coarsen hooks registered with DMCoarsenHookAdd() */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHookAdd"
/*@C
   DMCoarsenHookAdd - adds a callback to be
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.
2000 2001 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2002 @*/ 2003 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 2004 { 2005 PetscErrorCode ierr; 2006 DMCoarsenHookLink link,*p; 2007 2008 PetscFunctionBegin; 2009 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 2010 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2011 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 2012 link->coarsenhook = coarsenhook; 2013 link->restricthook = restricthook; 2014 link->ctx = ctx; 2015 link->next = NULL; 2016 *p = link; 2017 PetscFunctionReturn(0); 2018 } 2019 2020 #undef __FUNCT__ 2021 #define __FUNCT__ "DMRestrict" 2022 /*@ 2023 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 2024 2025 Collective if any hooks are 2026 2027 Input Arguments: 2028 + fine - finer DM to use as a base 2029 . restrct - restriction matrix, apply using MatRestrict() 2030 . 
inject - injection matrix, also use MatRestrict() 2031 - coarse - coarer DM to update 2032 2033 Level: developer 2034 2035 .seealso: DMCoarsenHookAdd(), MatRestrict() 2036 @*/ 2037 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2038 { 2039 PetscErrorCode ierr; 2040 DMCoarsenHookLink link; 2041 2042 PetscFunctionBegin; 2043 for (link=fine->coarsenhook; link; link=link->next) { 2044 if (link->restricthook) { 2045 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2046 } 2047 } 2048 PetscFunctionReturn(0); 2049 } 2050 2051 #undef __FUNCT__ 2052 #define __FUNCT__ "DMSubDomainHookAdd" 2053 /*@C 2054 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2055 2056 Logically Collective 2057 2058 Input Arguments: 2059 + global - global DM 2060 . ddhook - function to run to pass data to the decomposition DM upon its creation 2061 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2062 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2063 2064 2065 Calling sequence for ddhook: 2066 $ ddhook(DM global,DM block,void *ctx) 2067 2068 + global - global DM 2069 . block - block DM 2070 - ctx - optional user-defined function context 2071 2072 Calling sequence for restricthook: 2073 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2074 2075 + global - global DM 2076 . out - scatter to the outer (with ghost and overlap points) block vector 2077 . in - scatter to block vector values only owned locally 2078 . block - block DM 2079 - ctx - optional user-defined function context 2080 2081 Level: advanced 2082 2083 Notes: 2084 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2085 2086 If this function is called multiple times, the hooks will be run in the order they are added. 
2087 2088 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2089 extract the global information from its context (instead of from the SNES). 2090 2091 This function is currently not available from Fortran. 2092 2093 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2094 @*/ 2095 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 2096 { 2097 PetscErrorCode ierr; 2098 DMSubDomainHookLink link,*p; 2099 2100 PetscFunctionBegin; 2101 PetscValidHeaderSpecific(global,DM_CLASSID,1); 2102 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2103 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 2104 link->restricthook = restricthook; 2105 link->ddhook = ddhook; 2106 link->ctx = ctx; 2107 link->next = NULL; 2108 *p = link; 2109 PetscFunctionReturn(0); 2110 } 2111 2112 #undef __FUNCT__ 2113 #define __FUNCT__ "DMSubDomainRestrict" 2114 /*@ 2115 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 2116 2117 Collective if any hooks are 2118 2119 Input Arguments: 2120 + fine - finer DM to use as a base 2121 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 2122 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2123 - coarse - coarer DM to update 2124 2125 Level: developer 2126 2127 .seealso: DMCoarsenHookAdd(), MatRestrict() 2128 @*/ 2129 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2130 { 2131 PetscErrorCode ierr; 2132 DMSubDomainHookLink link; 2133 2134 PetscFunctionBegin; 2135 for (link=global->subdomainhook; link; link=link->next) { 2136 if (link->restricthook) { 2137 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2138 } 2139 } 2140 PetscFunctionReturn(0); 2141 } 2142 2143 #undef __FUNCT__ 2144 #define __FUNCT__ "DMGetCoarsenLevel" 2145 /*@ 2146 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2147 2148 Not Collective 2149 2150 Input Parameter: 2151 . dm - the DM object 2152 2153 Output Parameter: 2154 . level - number of coarsenings 2155 2156 Level: developer 2157 2158 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2159 2160 @*/ 2161 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2162 { 2163 PetscFunctionBegin; 2164 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2165 *level = dm->leveldown; 2166 PetscFunctionReturn(0); 2167 } 2168 2169 2170 2171 #undef __FUNCT__ 2172 #define __FUNCT__ "DMRefineHierarchy" 2173 /*@C 2174 DMRefineHierarchy - Refines a DM object, all levels at once 2175 2176 Collective on DM 2177 2178 Input Parameter: 2179 + dm - the DM object 2180 - nlevels - the number of levels of refinement 2181 2182 Output Parameter: 2183 . 
dmf - the refined DM hierarchy 2184 2185 Level: developer 2186 2187 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2188 2189 @*/ 2190 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2191 { 2192 PetscErrorCode ierr; 2193 2194 PetscFunctionBegin; 2195 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2196 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2197 if (nlevels == 0) PetscFunctionReturn(0); 2198 if (dm->ops->refinehierarchy) { 2199 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2200 } else if (dm->ops->refine) { 2201 PetscInt i; 2202 2203 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2204 for (i=1; i<nlevels; i++) { 2205 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2206 } 2207 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2208 PetscFunctionReturn(0); 2209 } 2210 2211 #undef __FUNCT__ 2212 #define __FUNCT__ "DMCoarsenHierarchy" 2213 /*@C 2214 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2215 2216 Collective on DM 2217 2218 Input Parameter: 2219 + dm - the DM object 2220 - nlevels - the number of levels of coarsening 2221 2222 Output Parameter: 2223 . 
dmc - the coarsened DM hierarchy 2224 2225 Level: developer 2226 2227 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2228 2229 @*/ 2230 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2231 { 2232 PetscErrorCode ierr; 2233 2234 PetscFunctionBegin; 2235 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2236 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2237 if (nlevels == 0) PetscFunctionReturn(0); 2238 PetscValidPointer(dmc,3); 2239 if (dm->ops->coarsenhierarchy) { 2240 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2241 } else if (dm->ops->coarsen) { 2242 PetscInt i; 2243 2244 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2245 for (i=1; i<nlevels; i++) { 2246 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2247 } 2248 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2249 PetscFunctionReturn(0); 2250 } 2251 2252 #undef __FUNCT__ 2253 #define __FUNCT__ "DMCreateAggregates" 2254 /*@ 2255 DMCreateAggregates - Gets the aggregates that map between 2256 grids associated with two DMs. 2257 2258 Collective on DM 2259 2260 Input Parameters: 2261 + dmc - the coarse grid DM 2262 - dmf - the fine grid DM 2263 2264 Output Parameters: 2265 . 
rest - the restriction matrix (transpose of the projection matrix) 2266 2267 Level: intermediate 2268 2269 .keywords: interpolation, restriction, multigrid 2270 2271 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2272 @*/ 2273 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2274 { 2275 PetscErrorCode ierr; 2276 2277 PetscFunctionBegin; 2278 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2279 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2280 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2281 PetscFunctionReturn(0); 2282 } 2283 2284 #undef __FUNCT__ 2285 #define __FUNCT__ "DMSetApplicationContextDestroy" 2286 /*@C 2287 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2288 2289 Not Collective 2290 2291 Input Parameters: 2292 + dm - the DM object 2293 - destroy - the destroy function 2294 2295 Level: intermediate 2296 2297 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2298 2299 @*/ 2300 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2301 { 2302 PetscFunctionBegin; 2303 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2304 dm->ctxdestroy = destroy; 2305 PetscFunctionReturn(0); 2306 } 2307 2308 #undef __FUNCT__ 2309 #define __FUNCT__ "DMSetApplicationContext" 2310 /*@ 2311 DMSetApplicationContext - Set a user context into a DM object 2312 2313 Not Collective 2314 2315 Input Parameters: 2316 + dm - the DM object 2317 - ctx - the user context 2318 2319 Level: intermediate 2320 2321 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2322 2323 @*/ 2324 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2325 { 2326 PetscFunctionBegin; 2327 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2328 dm->ctx = ctx; 2329 PetscFunctionReturn(0); 2330 } 2331 
2332 #undef __FUNCT__ 2333 #define __FUNCT__ "DMGetApplicationContext" 2334 /*@ 2335 DMGetApplicationContext - Gets a user context from a DM object 2336 2337 Not Collective 2338 2339 Input Parameter: 2340 . dm - the DM object 2341 2342 Output Parameter: 2343 . ctx - the user context 2344 2345 Level: intermediate 2346 2347 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2348 2349 @*/ 2350 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2351 { 2352 PetscFunctionBegin; 2353 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2354 *(void**)ctx = dm->ctx; 2355 PetscFunctionReturn(0); 2356 } 2357 2358 #undef __FUNCT__ 2359 #define __FUNCT__ "DMSetVariableBounds" 2360 /*@C 2361 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2362 2363 Logically Collective on DM 2364 2365 Input Parameter: 2366 + dm - the DM object 2367 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2368 2369 Level: intermediate 2370 2371 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2372 DMSetJacobian() 2373 2374 @*/ 2375 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2376 { 2377 PetscFunctionBegin; 2378 dm->ops->computevariablebounds = f; 2379 PetscFunctionReturn(0); 2380 } 2381 2382 #undef __FUNCT__ 2383 #define __FUNCT__ "DMHasVariableBounds" 2384 /*@ 2385 DMHasVariableBounds - does the DM object have a variable bounds function? 2386 2387 Not Collective 2388 2389 Input Parameter: 2390 . dm - the DM object to destroy 2391 2392 Output Parameter: 2393 . 
flg - PETSC_TRUE if the variable bounds function exists 2394 2395 Level: developer 2396 2397 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2398 2399 @*/ 2400 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2401 { 2402 PetscFunctionBegin; 2403 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2404 PetscFunctionReturn(0); 2405 } 2406 2407 #undef __FUNCT__ 2408 #define __FUNCT__ "DMComputeVariableBounds" 2409 /*@C 2410 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2411 2412 Logically Collective on DM 2413 2414 Input Parameters: 2415 + dm - the DM object to destroy 2416 - x - current solution at which the bounds are computed 2417 2418 Output parameters: 2419 + xl - lower bound 2420 - xu - upper bound 2421 2422 Level: intermediate 2423 2424 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2425 2426 @*/ 2427 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2428 { 2429 PetscErrorCode ierr; 2430 2431 PetscFunctionBegin; 2432 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2433 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2434 if (dm->ops->computevariablebounds) { 2435 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2436 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2437 PetscFunctionReturn(0); 2438 } 2439 2440 #undef __FUNCT__ 2441 #define __FUNCT__ "DMHasColoring" 2442 /*@ 2443 DMHasColoring - does the DM object have a method of providing a coloring? 2444 2445 Not Collective 2446 2447 Input Parameter: 2448 . dm - the DM object 2449 2450 Output Parameter: 2451 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2452 2453 Level: developer 2454 2455 .seealso DMHasFunction(), DMCreateColoring() 2456 2457 @*/ 2458 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2459 { 2460 PetscFunctionBegin; 2461 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2462 PetscFunctionReturn(0); 2463 } 2464 2465 #undef __FUNCT__ 2466 #define __FUNCT__ "DMSetVec" 2467 /*@C 2468 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2469 2470 Collective on DM 2471 2472 Input Parameter: 2473 + dm - the DM object 2474 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2475 2476 Level: developer 2477 2478 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2479 2480 @*/ 2481 PetscErrorCode DMSetVec(DM dm,Vec x) 2482 { 2483 PetscErrorCode ierr; 2484 2485 PetscFunctionBegin; 2486 if (x) { 2487 if (!dm->x) { 2488 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2489 } 2490 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2491 } else if (dm->x) { 2492 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2493 } 2494 PetscFunctionReturn(0); 2495 } 2496 2497 PetscFunctionList DMList = NULL; 2498 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2499 2500 #undef __FUNCT__ 2501 #define __FUNCT__ "DMSetType" 2502 /*@C 2503 DMSetType - Builds a DM, for a particular DM implementation. 2504 2505 Collective on DM 2506 2507 Input Parameters: 2508 + dm - The DM object 2509 - method - The name of the DM type 2510 2511 Options Database Key: 2512 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2513 2514 Notes: 2515 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2516 2517 Level: intermediate 2518 2519 .keywords: DM, set, type 2520 .seealso: DMGetType(), DMCreate() 2521 @*/ 2522 PetscErrorCode DMSetType(DM dm, DMType method) 2523 { 2524 PetscErrorCode (*r)(DM); 2525 PetscBool match; 2526 PetscErrorCode ierr; 2527 2528 PetscFunctionBegin; 2529 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2530 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2531 if (match) PetscFunctionReturn(0); 2532 2533 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2534 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2535 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2536 2537 if (dm->ops->destroy) { 2538 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2539 dm->ops->destroy = NULL; 2540 } 2541 ierr = (*r)(dm);CHKERRQ(ierr); 2542 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2543 PetscFunctionReturn(0); 2544 } 2545 2546 #undef __FUNCT__ 2547 #define __FUNCT__ "DMGetType" 2548 /*@C 2549 DMGetType - Gets the DM type name (as a string) from the DM. 2550 2551 Not Collective 2552 2553 Input Parameter: 2554 . dm - The DM 2555 2556 Output Parameter: 2557 . type - The DM type name 2558 2559 Level: intermediate 2560 2561 .keywords: DM, get, type, name 2562 .seealso: DMSetType(), DMCreate() 2563 @*/ 2564 PetscErrorCode DMGetType(DM dm, DMType *type) 2565 { 2566 PetscErrorCode ierr; 2567 2568 PetscFunctionBegin; 2569 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2570 PetscValidCharPointer(type,2); 2571 if (!DMRegisterAllCalled) { 2572 ierr = DMRegisterAll();CHKERRQ(ierr); 2573 } 2574 *type = ((PetscObject)dm)->type_name; 2575 PetscFunctionReturn(0); 2576 } 2577 2578 #undef __FUNCT__ 2579 #define __FUNCT__ "DMConvert" 2580 /*@C 2581 DMConvert - Converts a DM to another DM, either of the same or different type. 

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed here but never consulted below; a
     request for newtype "same" falls straight through to the converter lookup -- confirm intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    /* Converter to call, resolved by the precedence rules below */
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class.
          The composed-function name has the form DMConvert_<from>_<to>_C */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class; a temporary
          DM of the target type is created just to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
  or at runtime via the option
.vb
    -da_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  NOTE(review): the parameter list above mentions HDF5 viewers, but the implementation
  below rejects any viewer that is not binary -- confirm which is intended.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only binary viewers are supported by this generic loader */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream begins with the DM class id followed by the type name; dispatch to the type's loader */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__ 2776 #define __FUNCT__ "DMPrintCellVector" 2777 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2778 { 2779 PetscInt f; 2780 PetscErrorCode ierr; 2781 2782 PetscFunctionBegin; 2783 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2784 for (f = 0; f < len; ++f) { 2785 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2786 } 2787 PetscFunctionReturn(0); 2788 } 2789 2790 #undef __FUNCT__ 2791 #define __FUNCT__ "DMPrintCellMatrix" 2792 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2793 { 2794 PetscInt f, g; 2795 PetscErrorCode ierr; 2796 2797 PetscFunctionBegin; 2798 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2799 for (f = 0; f < rows; ++f) { 2800 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2801 for (g = 0; g < cols; ++g) { 2802 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2803 } 2804 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2805 } 2806 PetscFunctionReturn(0); 2807 } 2808 2809 #undef __FUNCT__ 2810 #define __FUNCT__ "DMGetDefaultSection" 2811 /*@ 2812 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2813 2814 Input Parameter: 2815 . dm - The DM 2816 2817 Output Parameter: 2818 . section - The PetscSection 2819 2820 Level: intermediate 2821 2822 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2823 2824 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2825 @*/ 2826 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2827 { 2828 PetscFunctionBegin; 2829 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2830 PetscValidPointer(section, 2); 2831 *section = dm->defaultSection; 2832 PetscFunctionReturn(0); 2833 } 2834 2835 #undef __FUNCT__ 2836 #define __FUNCT__ "DMSetDefaultSection" 2837 /*@ 2838 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2839 2840 Input Parameters: 2841 + dm - The DM 2842 - section - The PetscSection 2843 2844 Level: intermediate 2845 2846 Note: Any existing Section will be destroyed 2847 2848 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2849 @*/ 2850 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) 2851 { 2852 PetscInt numFields; 2853 PetscInt f; 2854 PetscErrorCode ierr; 2855 2856 PetscFunctionBegin; 2857 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2858 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2859 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2860 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2861 dm->defaultSection = section; 2862 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 2863 if (numFields) { 2864 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 2865 for (f = 0; f < numFields; ++f) { 2866 const char *name; 2867 2868 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 2869 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 2870 } 2871 } 2872 /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). 
*/ 2873 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2874 PetscFunctionReturn(0); 2875 } 2876 2877 #undef __FUNCT__ 2878 #define __FUNCT__ "DMGetDefaultGlobalSection" 2879 /*@ 2880 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2881 2882 Collective on DM 2883 2884 Input Parameter: 2885 . dm - The DM 2886 2887 Output Parameter: 2888 . section - The PetscSection 2889 2890 Level: intermediate 2891 2892 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2893 2894 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2895 @*/ 2896 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2897 { 2898 PetscErrorCode ierr; 2899 2900 PetscFunctionBegin; 2901 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2902 PetscValidPointer(section, 2); 2903 if (!dm->defaultGlobalSection) { 2904 if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 2905 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2906 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2907 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr); 2908 } 2909 *section = dm->defaultGlobalSection; 2910 PetscFunctionReturn(0); 2911 } 2912 2913 #undef __FUNCT__ 2914 #define __FUNCT__ "DMSetDefaultGlobalSection" 2915 /*@ 2916 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2917 2918 Input Parameters: 2919 + dm - The DM 2920 - section - The PetscSection, or NULL 2921 2922 Level: intermediate 2923 2924 Note: Any existing Section will be destroyed 2925 2926 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2927 @*/ 2928 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2929 { 2930 PetscErrorCode ierr; 2931 2932 PetscFunctionBegin; 2933 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2934 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2935 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2936 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2937 dm->defaultGlobalSection = section; 2938 PetscFunctionReturn(0); 2939 } 2940 2941 #undef __FUNCT__ 2942 #define __FUNCT__ "DMGetDefaultSF" 2943 /*@ 2944 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2945 it is created from the default PetscSection layouts in the DM. 2946 2947 Input Parameter: 2948 . dm - The DM 2949 2950 Output Parameter: 2951 . sf - The PetscSF 2952 2953 Level: intermediate 2954 2955 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2956 2957 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2958 @*/ 2959 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2960 { 2961 PetscInt nroots; 2962 PetscErrorCode ierr; 2963 2964 PetscFunctionBegin; 2965 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2966 PetscValidPointer(sf, 2); 2967 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2968 if (nroots < 0) { 2969 PetscSection section, gSection; 2970 2971 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2972 if (section) { 2973 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2974 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2975 } else { 2976 *sf = NULL; 2977 PetscFunctionReturn(0); 2978 } 2979 } 2980 *sf = dm->defaultSF; 2981 PetscFunctionReturn(0); 2982 } 2983 2984 #undef __FUNCT__ 2985 #define __FUNCT__ "DMSetDefaultSF" 2986 /*@ 2987 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 2988 2989 Input Parameters: 2990 + dm - The DM 2991 - sf - The PetscSF 2992 2993 Level: intermediate 2994 2995 Note: Any previous SF is destroyed 2996 2997 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 2998 @*/ 2999 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3000 { 3001 PetscErrorCode ierr; 3002 3003 PetscFunctionBegin; 3004 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3005 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3006 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3007 dm->defaultSF = sf; 3008 PetscFunctionReturn(0); 3009 } 3010 3011 #undef __FUNCT__ 3012 #define __FUNCT__ "DMCreateDefaultSF" 3013 /*@C 3014 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3015 describing the data layout. 3016 3017 Input Parameters: 3018 + dm - The DM 3019 . 
localSection - PetscSection describing the local data layout 3020 - globalSection - PetscSection describing the global data layout 3021 3022 Level: intermediate 3023 3024 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3025 @*/ 3026 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3027 { 3028 MPI_Comm comm; 3029 PetscLayout layout; 3030 const PetscInt *ranges; 3031 PetscInt *local; 3032 PetscSFNode *remote; 3033 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3034 PetscMPIInt size, rank; 3035 PetscErrorCode ierr; 3036 3037 PetscFunctionBegin; 3038 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3039 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3040 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3041 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3042 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3043 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3044 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3045 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3046 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3047 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3048 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3049 for (p = pStart; p < pEnd; ++p) { 3050 PetscInt gdof, gcdof; 3051 3052 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3053 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3054 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3055 } 3056 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3057 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3058 for (p = pStart, l = 0; p < pEnd; ++p) { 3059 const PetscInt *cind; 3060 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3061 3062 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3063 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3064 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3065 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3066 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3067 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3068 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3069 if (!gdof) continue; /* Censored point */ 3070 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3071 if (gsize != dof-cdof) { 3072 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3073 cdof = 0; /* Ignore constraints */ 3074 } 3075 for (d = 0, c = 0; d < dof; ++d) { 3076 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3077 local[l+d-c] = off+d; 3078 } 3079 if (gdof < 0) { 3080 for (d = 0; d < gsize; ++d, ++l) { 3081 PetscInt offset = -(goff+1) + d, r; 3082 3083 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 3084 if (r < 0) r = -(r+2); 3085 remote[l].rank = r; 3086 remote[l].index = offset - ranges[r]; 3087 } 3088 } else { 3089 for (d = 0; d < gsize; ++d, ++l) { 3090 remote[l].rank = rank; 3091 remote[l].index = goff+d - ranges[rank]; 3092 } 3093 } 3094 } 3095 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3096 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3097 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3098 PetscFunctionReturn(0); 3099 } 3100 3101 #undef __FUNCT__ 3102 #define __FUNCT__ "DMGetPointSF" 3103 /*@ 3104 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3105 3106 Input Parameter: 3107 . dm - The DM 3108 3109 Output Parameter: 3110 . sf - The PetscSF 3111 3112 Level: intermediate 3113 3114 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3115 3116 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3117 @*/ 3118 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3119 { 3120 PetscFunctionBegin; 3121 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3122 PetscValidPointer(sf, 2); 3123 *sf = dm->sf; 3124 PetscFunctionReturn(0); 3125 } 3126 3127 #undef __FUNCT__ 3128 #define __FUNCT__ "DMSetPointSF" 3129 /*@ 3130 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3131 3132 Input Parameters: 3133 + dm - The DM 3134 - sf - The PetscSF 3135 3136 Level: intermediate 3137 3138 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3139 @*/ 3140 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3141 { 3142 PetscErrorCode ierr; 3143 3144 PetscFunctionBegin; 3145 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3146 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3147 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3148 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3149 dm->sf = sf; 3150 PetscFunctionReturn(0); 3151 } 3152 3153 #undef __FUNCT__ 3154 #define __FUNCT__ "DMGetNumFields" 3155 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3156 { 3157 PetscFunctionBegin; 3158 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3159 PetscValidPointer(numFields, 2); 3160 *numFields = dm->numFields; 3161 PetscFunctionReturn(0); 3162 } 3163 3164 #undef __FUNCT__ 3165 #define __FUNCT__ "DMSetNumFields" 3166 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3167 { 3168 PetscInt f; 3169 PetscErrorCode ierr; 3170 3171 PetscFunctionBegin; 3172 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3173 for (f = 0; f < dm->numFields; ++f) { 3174 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3175 } 3176 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3177 dm->numFields = numFields; 3178 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3179 for (f = 0; f < dm->numFields; ++f) { 3180 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3181 } 3182 PetscFunctionReturn(0); 3183 } 3184 3185 #undef __FUNCT__ 3186 #define __FUNCT__ "DMGetField" 3187 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3188 { 3189 PetscFunctionBegin; 3190 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3191 PetscValidPointer(field, 2); 3192 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3193 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3194 *field = dm->fields[f]; 3195 PetscFunctionReturn(0); 3196 } 3197 3198 #undef __FUNCT__ 3199 #define __FUNCT__ "DMRestrictHook_Coordinates" 3200 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3201 { 3202 DM dm_coord,dmc_coord; 3203 PetscErrorCode ierr; 3204 Vec coords,ccoords; 3205 VecScatter scat; 3206 PetscFunctionBegin; 3207 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3208 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3209 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3210 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3211 if (coords && !ccoords) { 3212 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3213 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3214 ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3215 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3216 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3217 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3218 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3219 } 3220 PetscFunctionReturn(0); 3221 } 3222 3223 #undef __FUNCT__ 3224 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3225 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3226 { 3227 DM dm_coord,subdm_coord; 3228 PetscErrorCode ierr; 3229 Vec coords,ccoords,clcoords; 3230 VecScatter *scat_i,*scat_g; 3231 PetscFunctionBegin; 3232 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3233 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3234 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3235 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3236 if (coords && !ccoords) { 3237 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3238 ierr = 
DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3239 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3240 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3241 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3242 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3243 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3244 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3245 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3246 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3247 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3248 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3249 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3250 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3251 ierr = PetscFree(scat_g);CHKERRQ(ierr); 3252 } 3253 PetscFunctionReturn(0); 3254 } 3255 3256 #undef __FUNCT__ 3257 #define __FUNCT__ "DMSetCoordinates" 3258 /*@ 3259 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3260 3261 Collective on DM 3262 3263 Input Parameters: 3264 + dm - the DM 3265 - c - coordinate vector 3266 3267 Note: 3268 The coordinates do include those for ghost points, which are in the local vector 3269 3270 Level: intermediate 3271 3272 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3273 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3274 @*/ 3275 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3276 { 3277 PetscErrorCode ierr; 3278 3279 PetscFunctionBegin; 3280 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3281 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3282 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3283 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3284 dm->coordinates = c; 3285 ierr = 
VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3286 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3287 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3288 PetscFunctionReturn(0); 3289 } 3290 3291 #undef __FUNCT__ 3292 #define __FUNCT__ "DMSetCoordinatesLocal" 3293 /*@ 3294 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3295 3296 Collective on DM 3297 3298 Input Parameters: 3299 + dm - the DM 3300 - c - coordinate vector 3301 3302 Note: 3303 The coordinates of ghost points can be set using DMSetCoordinates() 3304 followed by DMGetCoordinatesLocal(). This is intended to enable the 3305 setting of ghost coordinates outside of the domain. 3306 3307 Level: intermediate 3308 3309 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3310 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3311 @*/ 3312 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3313 { 3314 PetscErrorCode ierr; 3315 3316 PetscFunctionBegin; 3317 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3318 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3319 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3320 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3321 3322 dm->coordinatesLocal = c; 3323 3324 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3325 PetscFunctionReturn(0); 3326 } 3327 3328 #undef __FUNCT__ 3329 #define __FUNCT__ "DMGetCoordinates" 3330 /*@ 3331 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3332 3333 Not Collective 3334 3335 Input Parameter: 3336 . dm - the DM 3337 3338 Output Parameter: 3339 . c - global coordinate vector 3340 3341 Note: 3342 This is a borrowed reference, so the user should NOT destroy this vector 3343 3344 Each process has only the local coordinates (does NOT have the ghost coordinates). 
3345 3346 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3347 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3348 3349 Level: intermediate 3350 3351 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3352 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3353 @*/ 3354 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3355 { 3356 PetscErrorCode ierr; 3357 3358 PetscFunctionBegin; 3359 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3360 PetscValidPointer(c,2); 3361 if (!dm->coordinates && dm->coordinatesLocal) { 3362 DM cdm = NULL; 3363 3364 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3365 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3366 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3367 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3368 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3369 } 3370 *c = dm->coordinates; 3371 PetscFunctionReturn(0); 3372 } 3373 3374 #undef __FUNCT__ 3375 #define __FUNCT__ "DMGetCoordinatesLocal" 3376 /*@ 3377 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3378 3379 Collective on DM 3380 3381 Input Parameter: 3382 . dm - the DM 3383 3384 Output Parameter: 3385 . c - coordinate vector 3386 3387 Note: 3388 This is a borrowed reference, so the user should NOT destroy this vector 3389 3390 Each process has the local and ghost coordinates 3391 3392 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3393 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3394 3395 Level: intermediate 3396 3397 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3398 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3399 @*/ 3400 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3401 { 3402 PetscErrorCode ierr; 3403 3404 PetscFunctionBegin; 3405 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3406 PetscValidPointer(c,2); 3407 if (!dm->coordinatesLocal && dm->coordinates) { 3408 DM cdm = NULL; 3409 3410 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3411 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3412 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3413 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3414 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3415 } 3416 *c = dm->coordinatesLocal; 3417 PetscFunctionReturn(0); 3418 } 3419 3420 #undef __FUNCT__ 3421 #define __FUNCT__ "DMGetCoordinateDM" 3422 /*@ 3423 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3424 3425 Collective on DM 3426 3427 Input Parameter: 3428 . dm - the DM 3429 3430 Output Parameter: 3431 . 
cdm - coordinate DM 3432 3433 Level: intermediate 3434 3435 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3436 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3437 @*/ 3438 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3439 { 3440 PetscErrorCode ierr; 3441 3442 PetscFunctionBegin; 3443 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3444 PetscValidPointer(cdm,2); 3445 if (!dm->coordinateDM) { 3446 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3447 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3448 } 3449 *cdm = dm->coordinateDM; 3450 PetscFunctionReturn(0); 3451 } 3452 3453 #undef __FUNCT__ 3454 #define __FUNCT__ "DMLocatePoints" 3455 /*@ 3456 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3457 3458 Not collective 3459 3460 Input Parameters: 3461 + dm - The DM 3462 - v - The Vec of points 3463 3464 Output Parameter: 3465 . cells - The local cell numbers for cells which contain the points 3466 3467 Level: developer 3468 3469 .keywords: point location, mesh 3470 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3471 @*/ 3472 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3473 { 3474 PetscErrorCode ierr; 3475 3476 PetscFunctionBegin; 3477 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3478 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3479 PetscValidPointer(cells,3); 3480 if (dm->ops->locatepoints) { 3481 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3482 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3483 PetscFunctionReturn(0); 3484 } 3485