#include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
#include <petscsf.h>

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

/* String names matching the DMBoundaryType enum; the "DM_BOUNDARY_" prefix entry
   and trailing NULL follow the PETSc enum-name-table convention used by the
   options processing routines. */
const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

  If you never call DMSetType() it will generate an
  error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Register every package this object depends on so that class ids and
     log events exist before PetscHeaderCreate() uses DM_CLASSID below. */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty function table; the concrete implementation fills it in DMSetType() */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);

  v->ltogmap      = NULL;
  v->ltogmapb     = NULL;
  v->bs           = 1;                   /* default: no exploitable block structure */
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  v->L       = NULL;                     /* periodicity information is unset by default */
  v->maxCell = NULL;
  {
    PetscInt i;
    /* Clear the per-field null-space constructor slots; the bound 10 must match
       the array size in dmimpl.h and the check in DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields         = 0;
  v->fields            = NULL;
  v->dmBC              = NULL;
  v->outputSequenceNum = -1;             /* -1 marks "no output sequence started yet" */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}
#undef __FUNCT__ 73 #define __FUNCT__ "DMClone" 74 /*@ 75 DMClone - Creates a DM object with the same topology as the original. 76 77 Collective on MPI_Comm 78 79 Input Parameter: 80 . dm - The original DM object 81 82 Output Parameter: 83 . newdm - The new DM object 84 85 Level: beginner 86 87 .keywords: DM, topology, create 88 @*/ 89 PetscErrorCode DMClone(DM dm, DM *newdm) 90 { 91 PetscSF sf; 92 Vec coords; 93 void *ctx; 94 PetscErrorCode ierr; 95 96 PetscFunctionBegin; 97 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 98 PetscValidPointer(newdm,2); 99 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 100 if (dm->ops->clone) { 101 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 102 } 103 (*newdm)->setupcalled = PETSC_TRUE; 104 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 105 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 106 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 107 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 108 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 109 if (coords) { 110 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 111 } else { 112 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 113 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 114 } 115 if (dm->maxCell) { 116 const PetscReal *maxCell, *L; 117 ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr); 118 ierr = DMSetPeriodicity(*newdm, maxCell, L);CHKERRQ(ierr); 119 } 120 PetscFunctionReturn(0); 121 } 122 123 #undef __FUNCT__ 124 #define __FUNCT__ "DMSetVecType" 125 /*@C 126 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 127 128 Logically Collective on DMDA 129 130 Input Parameter: 131 + da - initial distributed array 132 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 133 134 Options Database: 135 . 
-dm_vec_type ctype 136 137 Level: intermediate 138 139 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType() 140 @*/ 141 PetscErrorCode DMSetVecType(DM da,VecType ctype) 142 { 143 PetscErrorCode ierr; 144 145 PetscFunctionBegin; 146 PetscValidHeaderSpecific(da,DM_CLASSID,1); 147 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 148 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 149 PetscFunctionReturn(0); 150 } 151 152 #undef __FUNCT__ 153 #define __FUNCT__ "DMGetVecType" 154 /*@C 155 DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 156 157 Logically Collective on DMDA 158 159 Input Parameter: 160 . da - initial distributed array 161 162 Output Parameter: 163 . ctype - the vector type 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 168 @*/ 169 PetscErrorCode DMGetVecType(DM da,VecType *ctype) 170 { 171 PetscFunctionBegin; 172 PetscValidHeaderSpecific(da,DM_CLASSID,1); 173 *ctype = da->vectype; 174 PetscFunctionReturn(0); 175 } 176 177 #undef __FUNCT__ 178 #define __FUNCT__ "VecGetDM" 179 /*@ 180 VecGetDM - Gets the DM defining the data layout of the vector 181 182 Not collective 183 184 Input Parameter: 185 . v - The Vec 186 187 Output Parameter: 188 . 
dm - The DM 189 190 Level: intermediate 191 192 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 193 @*/ 194 PetscErrorCode VecGetDM(Vec v, DM *dm) 195 { 196 PetscErrorCode ierr; 197 198 PetscFunctionBegin; 199 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 200 PetscValidPointer(dm,2); 201 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 202 PetscFunctionReturn(0); 203 } 204 205 #undef __FUNCT__ 206 #define __FUNCT__ "VecSetDM" 207 /*@ 208 VecSetDM - Sets the DM defining the data layout of the vector 209 210 Not collective 211 212 Input Parameters: 213 + v - The Vec 214 - dm - The DM 215 216 Level: intermediate 217 218 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 219 @*/ 220 PetscErrorCode VecSetDM(Vec v, DM dm) 221 { 222 PetscErrorCode ierr; 223 224 PetscFunctionBegin; 225 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 226 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 227 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 228 PetscFunctionReturn(0); 229 } 230 231 #undef __FUNCT__ 232 #define __FUNCT__ "DMSetMatType" 233 /*@C 234 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 235 236 Logically Collective on DM 237 238 Input Parameter: 239 + dm - the DM context 240 . ctype - the matrix type 241 242 Options Database: 243 . 
-dm_mat_type ctype 244 245 Level: intermediate 246 247 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType() 248 @*/ 249 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 250 { 251 PetscErrorCode ierr; 252 253 PetscFunctionBegin; 254 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 255 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 256 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 257 PetscFunctionReturn(0); 258 } 259 260 #undef __FUNCT__ 261 #define __FUNCT__ "DMGetMatType" 262 /*@C 263 DMGetMatType - Gets the type of matrix created with DMCreateMatrix() 264 265 Logically Collective on DM 266 267 Input Parameter: 268 . dm - the DM context 269 270 Output Parameter: 271 . ctype - the matrix type 272 273 Options Database: 274 . -dm_mat_type ctype 275 276 Level: intermediate 277 278 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType() 279 @*/ 280 PetscErrorCode DMGetMatType(DM dm,MatType *ctype) 281 { 282 PetscFunctionBegin; 283 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 284 *ctype = dm->mattype; 285 PetscFunctionReturn(0); 286 } 287 288 #undef __FUNCT__ 289 #define __FUNCT__ "MatGetDM" 290 /*@ 291 MatGetDM - Gets the DM defining the data layout of the matrix 292 293 Not collective 294 295 Input Parameter: 296 . A - The Mat 297 298 Output Parameter: 299 . 
dm - The DM 300 301 Level: intermediate 302 303 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 304 @*/ 305 PetscErrorCode MatGetDM(Mat A, DM *dm) 306 { 307 PetscErrorCode ierr; 308 309 PetscFunctionBegin; 310 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 311 PetscValidPointer(dm,2); 312 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 313 PetscFunctionReturn(0); 314 } 315 316 #undef __FUNCT__ 317 #define __FUNCT__ "MatSetDM" 318 /*@ 319 MatSetDM - Sets the DM defining the data layout of the matrix 320 321 Not collective 322 323 Input Parameters: 324 + A - The Mat 325 - dm - The DM 326 327 Level: intermediate 328 329 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 330 @*/ 331 PetscErrorCode MatSetDM(Mat A, DM dm) 332 { 333 PetscErrorCode ierr; 334 335 PetscFunctionBegin; 336 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 337 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 338 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 339 PetscFunctionReturn(0); 340 } 341 342 #undef __FUNCT__ 343 #define __FUNCT__ "DMSetOptionsPrefix" 344 /*@C 345 DMSetOptionsPrefix - Sets the prefix used for searching for all 346 DMDA options in the database. 347 348 Logically Collective on DMDA 349 350 Input Parameter: 351 + da - the DMDA context 352 - prefix - the prefix to prepend to all option names 353 354 Notes: 355 A hyphen (-) must NOT be given at the beginning of the prefix name. 356 The first character of all runtime options is AUTOMATICALLY the hyphen. 
   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegate to the PetscObject-level prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i]) cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only references held by the DM's own cached vectors remain: proceed to
     actually destroy; otherwise just drop one reference and return */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedglobal;
  (*dm)->namedglobal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedlocal;
  (*dm)->namedlocal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  /* Solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User context is destroyed only when a destroy callback was supplied */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->maxCell);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->L);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  ierr = DMDestroy(&(*dm)->dmBC);CHKERRQ(ierr);
  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Implementation-specific teardown last, after all shared resources are released */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);  /* idempotent: second call is a no-op */
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type> type of matrix to create inside DM 578 - -dm_coloring_type <global or ghosted> 579 580 Level: developer 581 582 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 583 584 @*/ 585 PetscErrorCode DMSetFromOptions(DM dm) 586 { 587 char typeName[256]; 588 PetscBool flg; 589 PetscErrorCode ierr; 590 591 PetscFunctionBegin; 592 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 593 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 594 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 595 ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 596 if (flg) { 597 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 598 } 599 ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 600 if (flg) { 601 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 602 } 603 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 604 if (dm->ops->setfromoptions) { 605 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 606 } 607 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 608 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 609 ierr = PetscOptionsEnd();CHKERRQ(ierr); 610 PetscFunctionReturn(0); 611 } 612 613 #undef __FUNCT__ 614 #define __FUNCT__ "DMView" 615 /*@C 616 DMView - Views a vector packer or DMDA. 
617 618 Collective on DM 619 620 Input Parameter: 621 + dm - the DM object to view 622 - v - the viewer 623 624 Level: developer 625 626 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 627 628 @*/ 629 PetscErrorCode DMView(DM dm,PetscViewer v) 630 { 631 PetscErrorCode ierr; 632 PetscBool isbinary; 633 634 PetscFunctionBegin; 635 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 636 if (!v) { 637 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 638 } 639 ierr = PetscObjectPrintClassNamePrefixType((PetscObject)dm,v);CHKERRQ(ierr); 640 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 641 if (isbinary) { 642 PetscInt classid = DM_FILE_CLASSID; 643 char type[256]; 644 645 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 646 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 647 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 648 } 649 if (dm->ops->view) { 650 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 651 } 652 PetscFunctionReturn(0); 653 } 654 655 #undef __FUNCT__ 656 #define __FUNCT__ "DMCreateGlobalVector" 657 /*@ 658 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 659 660 Collective on DM 661 662 Input Parameter: 663 . dm - the DM object 664 665 Output Parameter: 666 . 
vec - the global vector 667 668 Level: beginner 669 670 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 671 672 @*/ 673 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 674 { 675 PetscErrorCode ierr; 676 677 PetscFunctionBegin; 678 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 679 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 680 PetscFunctionReturn(0); 681 } 682 683 #undef __FUNCT__ 684 #define __FUNCT__ "DMCreateLocalVector" 685 /*@ 686 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 687 688 Not Collective 689 690 Input Parameter: 691 . dm - the DM object 692 693 Output Parameter: 694 . vec - the local vector 695 696 Level: beginner 697 698 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 699 700 @*/ 701 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 702 { 703 PetscErrorCode ierr; 704 705 PetscFunctionBegin; 706 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 707 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 708 PetscFunctionReturn(0); 709 } 710 711 #undef __FUNCT__ 712 #define __FUNCT__ "DMGetLocalToGlobalMapping" 713 /*@ 714 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 715 716 Collective on DM 717 718 Input Parameter: 719 . dm - the DM that provides the mapping 720 721 Output Parameter: 722 . ltog - the mapping 723 724 Level: intermediate 725 726 Notes: 727 This mapping can then be used by VecSetLocalToGlobalMapping() or 728 MatSetLocalToGlobalMapping(). 
729 730 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 731 @*/ 732 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 733 { 734 PetscErrorCode ierr; 735 736 PetscFunctionBegin; 737 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 738 PetscValidPointer(ltog,2); 739 if (!dm->ltogmap) { 740 PetscSection section, sectionGlobal; 741 742 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 743 if (section) { 744 PetscInt *ltog; 745 PetscInt pStart, pEnd, size, p, l; 746 747 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 748 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 749 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 750 ierr = PetscMalloc1(size, <og);CHKERRQ(ierr); /* We want the local+overlap size */ 751 for (p = pStart, l = 0; p < pEnd; ++p) { 752 PetscInt dof, off, c; 753 754 /* Should probably use constrained dofs */ 755 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 756 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 757 for (c = 0; c < dof; ++c, ++l) { 758 ltog[l] = off+c; 759 } 760 } 761 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 762 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 763 } else { 764 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 765 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 766 } 767 } 768 *ltog = dm->ltogmap; 769 PetscFunctionReturn(0); 770 } 771 772 #undef __FUNCT__ 773 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 774 /*@ 775 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 776 777 Collective on DM 778 779 Input Parameter: 780 . da - the distributed array that provides the mapping 781 782 Output Parameter: 783 . 
ltog - the block mapping 784 785 Level: intermediate 786 787 Notes: 788 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 789 MatSetLocalToGlobalMappingBlock(). 790 791 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 792 @*/ 793 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 794 { 795 PetscErrorCode ierr; 796 797 PetscFunctionBegin; 798 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 799 PetscValidPointer(ltog,2); 800 if (!dm->ltogmapb) { 801 PetscInt bs; 802 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 803 if (bs > 1) { 804 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 805 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 806 } else { 807 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 808 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 809 } 810 } 811 *ltog = dm->ltogmapb; 812 PetscFunctionReturn(0); 813 } 814 815 #undef __FUNCT__ 816 #define __FUNCT__ "DMGetBlockSize" 817 /*@ 818 DMGetBlockSize - Gets the inherent block size associated with a DM 819 820 Not Collective 821 822 Input Parameter: 823 . dm - the DM with block structure 824 825 Output Parameter: 826 . 
bs - the block size, 1 implies no exploitable block structure 827 828 Level: intermediate 829 830 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 831 @*/ 832 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 833 { 834 PetscFunctionBegin; 835 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 836 PetscValidPointer(bs,2); 837 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 838 *bs = dm->bs; 839 PetscFunctionReturn(0); 840 } 841 842 #undef __FUNCT__ 843 #define __FUNCT__ "DMCreateInterpolation" 844 /*@ 845 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 846 847 Collective on DM 848 849 Input Parameter: 850 + dm1 - the DM object 851 - dm2 - the second, finer DM object 852 853 Output Parameter: 854 + mat - the interpolation 855 - vec - the scaling (optional) 856 857 Level: developer 858 859 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 860 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 861 862 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 863 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
864 865 866 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 867 868 @*/ 869 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 870 { 871 PetscErrorCode ierr; 872 873 PetscFunctionBegin; 874 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 875 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 876 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 877 PetscFunctionReturn(0); 878 } 879 880 #undef __FUNCT__ 881 #define __FUNCT__ "DMCreateInjection" 882 /*@ 883 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 884 885 Collective on DM 886 887 Input Parameter: 888 + dm1 - the DM object 889 - dm2 - the second, finer DM object 890 891 Output Parameter: 892 . ctx - the injection 893 894 Level: developer 895 896 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 897 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 898 899 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 900 901 @*/ 902 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 903 { 904 PetscErrorCode ierr; 905 906 PetscFunctionBegin; 907 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 908 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 909 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 910 PetscFunctionReturn(0); 911 } 912 913 #undef __FUNCT__ 914 #define __FUNCT__ "DMCreateColoring" 915 /*@ 916 DMCreateColoring - Gets coloring for a DM 917 918 Collective on DM 919 920 Input Parameter: 921 + dm - the DM object 922 - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 923 924 Output Parameter: 925 . 
coloring - the coloring 926 927 Level: developer 928 929 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 930 931 @*/ 932 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 933 { 934 PetscErrorCode ierr; 935 936 PetscFunctionBegin; 937 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 938 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 939 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 940 PetscFunctionReturn(0); 941 } 942 943 #undef __FUNCT__ 944 #define __FUNCT__ "DMCreateMatrix" 945 /*@ 946 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 947 948 Collective on DM 949 950 Input Parameter: 951 . dm - the DM object 952 953 Output Parameter: 954 . mat - the empty Jacobian 955 956 Level: beginner 957 958 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 959 do not need to do it yourself. 960 961 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 962 the nonzero pattern call DMDASetMatPreallocateOnly() 963 964 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 965 internally by PETSc. 966 967 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 968 the indices for the global numbering for DMDAs which is complicated. 
969 970 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 971 972 @*/ 973 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 974 { 975 PetscErrorCode ierr; 976 977 PetscFunctionBegin; 978 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 979 ierr = MatInitializePackage();CHKERRQ(ierr); 980 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 981 PetscValidPointer(mat,3); 982 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 983 PetscFunctionReturn(0); 984 } 985 986 #undef __FUNCT__ 987 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 988 /*@ 989 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 990 preallocated but the nonzero structure and zero values will not be set. 991 992 Logically Collective on DMDA 993 994 Input Parameter: 995 + dm - the DM 996 - only - PETSC_TRUE if only want preallocation 997 998 Level: developer 999 .seealso DMCreateMatrix() 1000 @*/ 1001 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 1002 { 1003 PetscFunctionBegin; 1004 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1005 dm->prealloc_only = only; 1006 PetscFunctionReturn(0); 1007 } 1008 1009 #undef __FUNCT__ 1010 #define __FUNCT__ "DMGetWorkArray" 1011 /*@C 1012 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1013 1014 Not Collective 1015 1016 Input Parameters: 1017 + dm - the DM object 1018 . count - The minium size 1019 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1020 1021 Output Parameter: 1022 . 
array - the work array 1023 1024 Level: developer 1025 1026 .seealso DMDestroy(), DMCreate() 1027 @*/ 1028 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1029 { 1030 PetscErrorCode ierr; 1031 DMWorkLink link; 1032 size_t size; 1033 1034 PetscFunctionBegin; 1035 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1036 PetscValidPointer(mem,4); 1037 if (dm->workin) { 1038 link = dm->workin; 1039 dm->workin = dm->workin->next; 1040 } else { 1041 ierr = PetscNewLog(dm,&link);CHKERRQ(ierr); 1042 } 1043 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1044 if (size*count > link->bytes) { 1045 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1046 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1047 link->bytes = size*count; 1048 } 1049 link->next = dm->workout; 1050 dm->workout = link; 1051 *(void**)mem = link->mem; 1052 PetscFunctionReturn(0); 1053 } 1054 1055 #undef __FUNCT__ 1056 #define __FUNCT__ "DMRestoreWorkArray" 1057 /*@C 1058 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1059 1060 Not Collective 1061 1062 Input Parameters: 1063 + dm - the DM object 1064 . count - The minium size 1065 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1066 1067 Output Parameter: 1068 . 
array - the work array 1069 1070 Level: developer 1071 1072 .seealso DMDestroy(), DMCreate() 1073 @*/ 1074 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1075 { 1076 DMWorkLink *p,link; 1077 1078 PetscFunctionBegin; 1079 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1080 PetscValidPointer(mem,4); 1081 for (p=&dm->workout; (link=*p); p=&link->next) { 1082 if (link->mem == *(void**)mem) { 1083 *p = link->next; 1084 link->next = dm->workin; 1085 dm->workin = link; 1086 *(void**)mem = NULL; 1087 PetscFunctionReturn(0); 1088 } 1089 } 1090 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1091 PetscFunctionReturn(0); 1092 } 1093 1094 #undef __FUNCT__ 1095 #define __FUNCT__ "DMSetNullSpaceConstructor" 1096 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1097 { 1098 PetscFunctionBegin; 1099 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1100 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1101 dm->nullspaceConstructors[field] = nullsp; 1102 PetscFunctionReturn(0); 1103 } 1104 1105 #undef __FUNCT__ 1106 #define __FUNCT__ "DMCreateFieldIS" 1107 /*@C 1108 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1109 1110 Not collective 1111 1112 Input Parameter: 1113 . dm - the DM object 1114 1115 Output Parameters: 1116 + numFields - The number of fields (or NULL if not requested) 1117 . fieldNames - The name for each field (or NULL if not requested) 1118 - fields - The global indices for each field (or NULL if not requested) 1119 1120 Level: intermediate 1121 1122 Notes: 1123 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1124 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1125 PetscFree(). 1126 1127 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1128 @*/ 1129 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1130 { 1131 PetscSection section, sectionGlobal; 1132 PetscErrorCode ierr; 1133 1134 PetscFunctionBegin; 1135 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1136 if (numFields) { 1137 PetscValidPointer(numFields,2); 1138 *numFields = 0; 1139 } 1140 if (fieldNames) { 1141 PetscValidPointer(fieldNames,3); 1142 *fieldNames = NULL; 1143 } 1144 if (fields) { 1145 PetscValidPointer(fields,4); 1146 *fields = NULL; 1147 } 1148 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1149 if (section) { 1150 PetscInt *fieldSizes, **fieldIndices; 1151 PetscInt nF, f, pStart, pEnd, p; 1152 1153 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1154 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1155 ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr); 1156 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1157 for (f = 0; f < nF; ++f) { 1158 fieldSizes[f] = 0; 1159 } 1160 for (p = pStart; p < pEnd; ++p) { 1161 PetscInt gdof; 1162 1163 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1164 if (gdof > 0) { 1165 for (f = 0; f < nF; ++f) { 1166 PetscInt fdof, fcdof; 1167 1168 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1169 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1170 fieldSizes[f] += fdof-fcdof; 1171 } 1172 } 1173 } 1174 for (f = 0; f < nF; ++f) { 1175 ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr); 1176 fieldSizes[f] = 0; 1177 } 1178 for (p = pStart; p < pEnd; ++p) { 1179 PetscInt gdof, goff; 1180 1181 ierr = 
PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1182 if (gdof > 0) { 1183 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1184 for (f = 0; f < nF; ++f) { 1185 PetscInt fdof, fcdof, fc; 1186 1187 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1188 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1189 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1190 fieldIndices[f][fieldSizes[f]] = goff++; 1191 } 1192 } 1193 } 1194 } 1195 if (numFields) *numFields = nF; 1196 if (fieldNames) { 1197 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1198 for (f = 0; f < nF; ++f) { 1199 const char *fieldName; 1200 1201 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1202 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1203 } 1204 } 1205 if (fields) { 1206 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1207 for (f = 0; f < nF; ++f) { 1208 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1209 } 1210 } 1211 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1212 } else if (dm->ops->createfieldis) { 1213 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1214 } 1215 PetscFunctionReturn(0); 1216 } 1217 1218 1219 #undef __FUNCT__ 1220 #define __FUNCT__ "DMCreateFieldDecomposition" 1221 /*@C 1222 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1223 corresponding to different fields: each IS contains the global indices of the dofs of the 1224 corresponding field. The optional list of DMs define the DM for each subproblem. 1225 Generalizes DMCreateFieldIS(). 1226 1227 Not collective 1228 1229 Input Parameter: 1230 . dm - the DM object 1231 1232 Output Parameters: 1233 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1234 . 
namelist - The name for each field (or NULL if not requested) 1235 . islist - The global indices for each field (or NULL if not requested) 1236 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1237 1238 Level: intermediate 1239 1240 Notes: 1241 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1242 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1243 and all of the arrays should be freed with PetscFree(). 1244 1245 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1246 @*/ 1247 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1248 { 1249 PetscErrorCode ierr; 1250 1251 PetscFunctionBegin; 1252 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1253 if (len) { 1254 PetscValidPointer(len,2); 1255 *len = 0; 1256 } 1257 if (namelist) { 1258 PetscValidPointer(namelist,3); 1259 *namelist = 0; 1260 } 1261 if (islist) { 1262 PetscValidPointer(islist,4); 1263 *islist = 0; 1264 } 1265 if (dmlist) { 1266 PetscValidPointer(dmlist,5); 1267 *dmlist = 0; 1268 } 1269 /* 1270 Is it a good idea to apply the following check across all impls? 1271 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1272 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1273 */ 1274 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1275 if (!dm->ops->createfielddecomposition) { 1276 PetscSection section; 1277 PetscInt numFields, f; 1278 1279 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1280 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1281 if (section && numFields && dm->ops->createsubdm) { 1282 *len = numFields; 1283 if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);} 1284 if (islist) {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);} 1285 if (dmlist) {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);} 1286 for (f = 0; f < numFields; ++f) { 1287 const char *fieldName; 1288 1289 ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr); 1290 if (namelist) { 1291 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1292 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1293 } 1294 } 1295 } else { 1296 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1297 /* By default there are no DMs associated with subproblems. */ 1298 if (dmlist) *dmlist = NULL; 1299 } 1300 } else { 1301 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1302 } 1303 PetscFunctionReturn(0); 1304 } 1305 1306 #undef __FUNCT__ 1307 #define __FUNCT__ "DMCreateSubDM" 1308 /*@C 1309 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1310 The fields are defined by DMCreateFieldIS(). 1311 1312 Not collective 1313 1314 Input Parameters: 1315 + dm - the DM object 1316 . numFields - number of fields in this subproblem 1317 - len - The number of subproblems in the decomposition (or NULL if not requested) 1318 1319 Output Parameters: 1320 . 
is - The global indices for the subproblem 1321 - dm - The DM for the subproblem 1322 1323 Level: intermediate 1324 1325 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1326 @*/ 1327 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1328 { 1329 PetscErrorCode ierr; 1330 1331 PetscFunctionBegin; 1332 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1333 PetscValidPointer(fields,3); 1334 if (is) PetscValidPointer(is,4); 1335 if (subdm) PetscValidPointer(subdm,5); 1336 if (dm->ops->createsubdm) { 1337 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1338 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1339 PetscFunctionReturn(0); 1340 } 1341 1342 1343 #undef __FUNCT__ 1344 #define __FUNCT__ "DMCreateDomainDecomposition" 1345 /*@C 1346 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1347 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1348 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1349 define a nonoverlapping covering, while outer subdomains can overlap. 1350 The optional list of DMs define the DM for each subproblem. 1351 1352 Not collective 1353 1354 Input Parameter: 1355 . dm - the DM object 1356 1357 Output Parameters: 1358 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1359 . namelist - The name for each subdomain (or NULL if not requested) 1360 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1361 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1362 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1363 1364 Level: intermediate 1365 1366 Notes: 1367 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1368 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1369 and all of the arrays should be freed with PetscFree(). 1370 1371 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1372 @*/ 1373 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1374 { 1375 PetscErrorCode ierr; 1376 DMSubDomainHookLink link; 1377 PetscInt i,l; 1378 1379 PetscFunctionBegin; 1380 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1381 if (len) {PetscValidPointer(len,2); *len = 0;} 1382 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1383 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1384 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1385 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1386 /* 1387 Is it a good idea to apply the following check across all impls? 1388 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1389 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1390 */ 1391 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1392 if (dm->ops->createdomaindecomposition) { 1393 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1394 /* copy subdomain hooks and context over to the subdomain DMs */ 1395 if (dmlist) { 1396 for (i = 0; i < l; i++) { 1397 for (link=dm->subdomainhook; link; link=link->next) { 1398 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1399 } 1400 (*dmlist)[i]->ctx = dm->ctx; 1401 } 1402 } 1403 if (len) *len = l; 1404 } 1405 PetscFunctionReturn(0); 1406 } 1407 1408 1409 #undef __FUNCT__ 1410 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1411 /*@C 1412 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1413 1414 Not collective 1415 1416 Input Parameters: 1417 + dm - the DM object 1418 . n - the number of subdomain scatters 1419 - subdms - the local subdomains 1420 1421 Output Parameters: 1422 + n - the number of scatters returned 1423 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1424 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1425 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1426 1427 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1428 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1429 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1430 solution and residual data. 
1431 1432 Level: developer 1433 1434 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1435 @*/ 1436 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1437 { 1438 PetscErrorCode ierr; 1439 1440 PetscFunctionBegin; 1441 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1442 PetscValidPointer(subdms,3); 1443 if (dm->ops->createddscatters) { 1444 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1445 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1446 PetscFunctionReturn(0); 1447 } 1448 1449 #undef __FUNCT__ 1450 #define __FUNCT__ "DMRefine" 1451 /*@ 1452 DMRefine - Refines a DM object 1453 1454 Collective on DM 1455 1456 Input Parameter: 1457 + dm - the DM object 1458 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1459 1460 Output Parameter: 1461 . 
dmf - the refined DM, or NULL 1462 1463 Note: If no refinement was done, the return value is NULL 1464 1465 Level: developer 1466 1467 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1468 @*/ 1469 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1470 { 1471 PetscErrorCode ierr; 1472 DMRefineHookLink link; 1473 1474 PetscFunctionBegin; 1475 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1476 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1477 if (*dmf) { 1478 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1479 1480 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1481 1482 (*dmf)->ctx = dm->ctx; 1483 (*dmf)->leveldown = dm->leveldown; 1484 (*dmf)->levelup = dm->levelup + 1; 1485 1486 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1487 for (link=dm->refinehook; link; link=link->next) { 1488 if (link->refinehook) { 1489 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1490 } 1491 } 1492 } 1493 PetscFunctionReturn(0); 1494 } 1495 1496 #undef __FUNCT__ 1497 #define __FUNCT__ "DMRefineHookAdd" 1498 /*@C 1499 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1500 1501 Logically Collective 1502 1503 Input Arguments: 1504 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1505 . refinehook - function to run when setting up a coarser level 1506 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1507 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1508 1509 Calling sequence of refinehook: 1510 $ refinehook(DM coarse,DM fine,void *ctx); 1511 1512 + coarse - coarse level DM 1513 . 
fine - fine level DM to interpolate problem to 1514 - ctx - optional user-defined function context 1515 1516 Calling sequence for interphook: 1517 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1518 1519 + coarse - coarse level DM 1520 . interp - matrix interpolating a coarse-level solution to the finer grid 1521 . fine - fine level DM to update 1522 - ctx - optional user-defined function context 1523 1524 Level: advanced 1525 1526 Notes: 1527 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1528 1529 If this function is called multiple times, the hooks will be run in the order they are added. 1530 1531 This function is currently not available from Fortran. 1532 1533 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1534 @*/ 1535 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1536 { 1537 PetscErrorCode ierr; 1538 DMRefineHookLink link,*p; 1539 1540 PetscFunctionBegin; 1541 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1542 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1543 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1544 link->refinehook = refinehook; 1545 link->interphook = interphook; 1546 link->ctx = ctx; 1547 link->next = NULL; 1548 *p = link; 1549 PetscFunctionReturn(0); 1550 } 1551 1552 #undef __FUNCT__ 1553 #define __FUNCT__ "DMInterpolate" 1554 /*@ 1555 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1556 1557 Collective if any hooks are 1558 1559 Input Arguments: 1560 + coarse - coarser DM to use as a base 1561 . 
restrct - interpolation matrix, apply using MatInterpolate() 1562 - fine - finer DM to update 1563 1564 Level: developer 1565 1566 .seealso: DMRefineHookAdd(), MatInterpolate() 1567 @*/ 1568 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1569 { 1570 PetscErrorCode ierr; 1571 DMRefineHookLink link; 1572 1573 PetscFunctionBegin; 1574 for (link=fine->refinehook; link; link=link->next) { 1575 if (link->interphook) { 1576 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1577 } 1578 } 1579 PetscFunctionReturn(0); 1580 } 1581 1582 #undef __FUNCT__ 1583 #define __FUNCT__ "DMGetRefineLevel" 1584 /*@ 1585 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1586 1587 Not Collective 1588 1589 Input Parameter: 1590 . dm - the DM object 1591 1592 Output Parameter: 1593 . level - number of refinements 1594 1595 Level: developer 1596 1597 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1598 1599 @*/ 1600 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1601 { 1602 PetscFunctionBegin; 1603 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1604 *level = dm->levelup; 1605 PetscFunctionReturn(0); 1606 } 1607 1608 #undef __FUNCT__ 1609 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1610 /*@C 1611 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1612 1613 Logically Collective 1614 1615 Input Arguments: 1616 + dm - the DM 1617 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1618 . endhook - function to run after DMGlobalToLocalEnd() has completed 1619 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1620 1621 Calling sequence for beginhook: 1622 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1623 1624 + dm - global DM 1625 . g - global vector 1626 . mode - mode 1627 . 
l - local vector 1628 - ctx - optional user-defined function context 1629 1630 1631 Calling sequence for endhook: 1632 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1633 1634 + global - global DM 1635 - ctx - optional user-defined function context 1636 1637 Level: advanced 1638 1639 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1640 @*/ 1641 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1642 { 1643 PetscErrorCode ierr; 1644 DMGlobalToLocalHookLink link,*p; 1645 1646 PetscFunctionBegin; 1647 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1648 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1649 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1650 link->beginhook = beginhook; 1651 link->endhook = endhook; 1652 link->ctx = ctx; 1653 link->next = NULL; 1654 *p = link; 1655 PetscFunctionReturn(0); 1656 } 1657 1658 #undef __FUNCT__ 1659 #define __FUNCT__ "DMGlobalToLocalBegin" 1660 /*@ 1661 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1662 1663 Neighbor-wise Collective on DM 1664 1665 Input Parameters: 1666 + dm - the DM object 1667 . g - the global vector 1668 . 
mode - INSERT_VALUES or ADD_VALUES 1669 - l - the local vector 1670 1671 1672 Level: beginner 1673 1674 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1675 1676 @*/ 1677 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1678 { 1679 PetscSF sf; 1680 PetscErrorCode ierr; 1681 DMGlobalToLocalHookLink link; 1682 1683 PetscFunctionBegin; 1684 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1685 for (link=dm->gtolhook; link; link=link->next) { 1686 if (link->beginhook) { 1687 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1688 } 1689 } 1690 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1691 if (sf) { 1692 PetscScalar *lArray, *gArray; 1693 1694 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1695 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1696 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1697 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1698 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1699 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1700 } else { 1701 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1702 } 1703 PetscFunctionReturn(0); 1704 } 1705 1706 #undef __FUNCT__ 1707 #define __FUNCT__ "DMGlobalToLocalEnd" 1708 /*@ 1709 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1710 1711 Neighbor-wise Collective on DM 1712 1713 Input Parameters: 1714 + dm - the DM object 1715 . g - the global vector 1716 . 
mode - INSERT_VALUES or ADD_VALUES 1717 - l - the local vector 1718 1719 1720 Level: beginner 1721 1722 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1723 1724 @*/ 1725 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1726 { 1727 PetscSF sf; 1728 PetscErrorCode ierr; 1729 PetscScalar *lArray, *gArray; 1730 DMGlobalToLocalHookLink link; 1731 1732 PetscFunctionBegin; 1733 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1734 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1735 if (sf) { 1736 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1737 1738 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1739 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1740 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1741 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1742 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1743 } else { 1744 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1745 } 1746 for (link=dm->gtolhook; link; link=link->next) { 1747 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1748 } 1749 PetscFunctionReturn(0); 1750 } 1751 1752 #undef __FUNCT__ 1753 #define __FUNCT__ "DMLocalToGlobalBegin" 1754 /*@ 1755 DMLocalToGlobalBegin - updates global vectors from local vectors 1756 1757 Neighbor-wise Collective on DM 1758 1759 Input Parameters: 1760 + dm - the DM object 1761 . l - the local vector 1762 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1763 base point. 1764 - - the global vector 1765 1766 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1767 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1768 global array to the final global array with VecAXPY(). 1769 1770 Level: beginner 1771 1772 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1773 1774 @*/ 1775 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1776 { 1777 PetscSF sf; 1778 PetscErrorCode ierr; 1779 1780 PetscFunctionBegin; 1781 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1782 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1783 if (sf) { 1784 MPI_Op op; 1785 PetscScalar *lArray, *gArray; 1786 1787 switch (mode) { 1788 case INSERT_VALUES: 1789 case INSERT_ALL_VALUES: 1790 op = MPIU_REPLACE; break; 1791 case ADD_VALUES: 1792 case ADD_ALL_VALUES: 1793 op = MPI_SUM; break; 1794 default: 1795 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1796 } 1797 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1798 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1799 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1800 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1801 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1802 } else { 1803 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1804 } 1805 PetscFunctionReturn(0); 1806 } 1807 1808 #undef __FUNCT__ 1809 #define __FUNCT__ "DMLocalToGlobalEnd" 1810 /*@ 1811 DMLocalToGlobalEnd - updates global vectors from local vectors 1812 1813 Neighbor-wise Collective on DM 1814 1815 Input Parameters: 1816 + dm - the DM object 1817 . l - the local vector 1818 . 
mode - INSERT_VALUES or ADD_VALUES 1819 - g - the global vector 1820 1821 1822 Level: beginner 1823 1824 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1825 1826 @*/ 1827 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1828 { 1829 PetscSF sf; 1830 PetscErrorCode ierr; 1831 1832 PetscFunctionBegin; 1833 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1834 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1835 if (sf) { 1836 MPI_Op op; 1837 PetscScalar *lArray, *gArray; 1838 1839 switch (mode) { 1840 case INSERT_VALUES: 1841 case INSERT_ALL_VALUES: 1842 op = MPIU_REPLACE; break; 1843 case ADD_VALUES: 1844 case ADD_ALL_VALUES: 1845 op = MPI_SUM; break; 1846 default: 1847 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1848 } 1849 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1850 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1851 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1852 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1853 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1854 } else { 1855 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1856 } 1857 PetscFunctionReturn(0); 1858 } 1859 1860 #undef __FUNCT__ 1861 #define __FUNCT__ "DMLocalToLocalBegin" 1862 /*@ 1863 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1864 that contain irrelevant values) to another local vector where the ghost 1865 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1866 1867 Neighbor-wise Collective on DM and Vec 1868 1869 Input Parameters: 1870 + dm - the DM object 1871 . g - the original local vector 1872 - mode - one of INSERT_VALUES or ADD_VALUES 1873 1874 Output Parameter: 1875 . 
l - the local vector with correct ghost values 1876 1877 Level: intermediate 1878 1879 Notes: 1880 The local vectors used here need not be the same as those 1881 obtained from DMCreateLocalVector(), BUT they 1882 must have the same parallel data layout; they could, for example, be 1883 obtained with VecDuplicate() from the DM originating vectors. 1884 1885 .keywords: DM, local-to-local, begin 1886 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1887 1888 @*/ 1889 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1890 { 1891 PetscErrorCode ierr; 1892 1893 PetscFunctionBegin; 1894 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1895 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1896 PetscFunctionReturn(0); 1897 } 1898 1899 #undef __FUNCT__ 1900 #define __FUNCT__ "DMLocalToLocalEnd" 1901 /*@ 1902 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1903 that contain irrelevant values) to another local vector where the ghost 1904 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1905 1906 Neighbor-wise Collective on DM and Vec 1907 1908 Input Parameters: 1909 + da - the DM object 1910 . g - the original local vector 1911 - mode - one of INSERT_VALUES or ADD_VALUES 1912 1913 Output Parameter: 1914 . l - the local vector with correct ghost values 1915 1916 Level: intermediate 1917 1918 Notes: 1919 The local vectors used here need not be the same as those 1920 obtained from DMCreateLocalVector(), BUT they 1921 must have the same parallel data layout; they could, for example, be 1922 obtained with VecDuplicate() from the DM originating vectors. 
1923 1924 .keywords: DM, local-to-local, end 1925 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1926 1927 @*/ 1928 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1929 { 1930 PetscErrorCode ierr; 1931 1932 PetscFunctionBegin; 1933 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1934 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1935 PetscFunctionReturn(0); 1936 } 1937 1938 1939 #undef __FUNCT__ 1940 #define __FUNCT__ "DMCoarsen" 1941 /*@ 1942 DMCoarsen - Coarsens a DM object 1943 1944 Collective on DM 1945 1946 Input Parameter: 1947 + dm - the DM object 1948 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1949 1950 Output Parameter: 1951 . dmc - the coarsened DM 1952 1953 Level: developer 1954 1955 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1956 1957 @*/ 1958 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1959 { 1960 PetscErrorCode ierr; 1961 DMCoarsenHookLink link; 1962 1963 PetscFunctionBegin; 1964 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1965 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1966 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1967 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1968 (*dmc)->ctx = dm->ctx; 1969 (*dmc)->levelup = dm->levelup; 1970 (*dmc)->leveldown = dm->leveldown + 1; 1971 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1972 for (link=dm->coarsenhook; link; link=link->next) { 1973 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1974 } 1975 PetscFunctionReturn(0); 1976 } 1977 1978 #undef __FUNCT__ 1979 #define __FUNCT__ "DMCoarsenHookAdd" 1980 /*@C 1981 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* The new link is appended to the list and owned by the DM (freed at DM destruction) */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.
inject - injection matrix, also use MatRestrict() 2050 - coarse - coarer DM to update 2051 2052 Level: developer 2053 2054 .seealso: DMCoarsenHookAdd(), MatRestrict() 2055 @*/ 2056 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2057 { 2058 PetscErrorCode ierr; 2059 DMCoarsenHookLink link; 2060 2061 PetscFunctionBegin; 2062 for (link=fine->coarsenhook; link; link=link->next) { 2063 if (link->restricthook) { 2064 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2065 } 2066 } 2067 PetscFunctionReturn(0); 2068 } 2069 2070 #undef __FUNCT__ 2071 #define __FUNCT__ "DMSubDomainHookAdd" 2072 /*@C 2073 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2074 2075 Logically Collective 2076 2077 Input Arguments: 2078 + global - global DM 2079 . ddhook - function to run to pass data to the decomposition DM upon its creation 2080 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2081 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2082 2083 2084 Calling sequence for ddhook: 2085 $ ddhook(DM global,DM block,void *ctx) 2086 2087 + global - global DM 2088 . block - block DM 2089 - ctx - optional user-defined function context 2090 2091 Calling sequence for restricthook: 2092 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2093 2094 + global - global DM 2095 . out - scatter to the outer (with ghost and overlap points) block vector 2096 . in - scatter to block vector values only owned locally 2097 . block - block DM 2098 - ctx - optional user-defined function context 2099 2100 Level: advanced 2101 2102 Notes: 2103 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2104 2105 If this function is called multiple times, the hooks will be run in the order they are added. 
In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* The new link is appended to the list and owned by the DM (freed at DM destruction) */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2142 - coarse - coarer DM to update 2143 2144 Level: developer 2145 2146 .seealso: DMCoarsenHookAdd(), MatRestrict() 2147 @*/ 2148 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2149 { 2150 PetscErrorCode ierr; 2151 DMSubDomainHookLink link; 2152 2153 PetscFunctionBegin; 2154 for (link=global->subdomainhook; link; link=link->next) { 2155 if (link->restricthook) { 2156 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2157 } 2158 } 2159 PetscFunctionReturn(0); 2160 } 2161 2162 #undef __FUNCT__ 2163 #define __FUNCT__ "DMGetCoarsenLevel" 2164 /*@ 2165 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2166 2167 Not Collective 2168 2169 Input Parameter: 2170 . dm - the DM object 2171 2172 Output Parameter: 2173 . level - number of coarsenings 2174 2175 Level: developer 2176 2177 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2178 2179 @*/ 2180 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2181 { 2182 PetscFunctionBegin; 2183 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2184 *level = dm->leveldown; 2185 PetscFunctionReturn(0); 2186 } 2187 2188 2189 2190 #undef __FUNCT__ 2191 #define __FUNCT__ "DMRefineHierarchy" 2192 /*@C 2193 DMRefineHierarchy - Refines a DM object, all levels at once 2194 2195 Collective on DM 2196 2197 Input Parameter: 2198 + dm - the DM object 2199 - nlevels - the number of levels of refinement 2200 2201 Output Parameter: 2202 . 
dmf - the refined DM hierarchy 2203 2204 Level: developer 2205 2206 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2207 2208 @*/ 2209 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2210 { 2211 PetscErrorCode ierr; 2212 2213 PetscFunctionBegin; 2214 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2215 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2216 if (nlevels == 0) PetscFunctionReturn(0); 2217 if (dm->ops->refinehierarchy) { 2218 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2219 } else if (dm->ops->refine) { 2220 PetscInt i; 2221 2222 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2223 for (i=1; i<nlevels; i++) { 2224 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2225 } 2226 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2227 PetscFunctionReturn(0); 2228 } 2229 2230 #undef __FUNCT__ 2231 #define __FUNCT__ "DMCoarsenHierarchy" 2232 /*@C 2233 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2234 2235 Collective on DM 2236 2237 Input Parameter: 2238 + dm - the DM object 2239 - nlevels - the number of levels of coarsening 2240 2241 Output Parameter: 2242 . 
dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* Preferred: let the implementation build the whole hierarchy at once */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Fall back to one coarsening at a time */
    PetscInt i;

    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix) 2285 2286 Level: intermediate 2287 2288 .keywords: interpolation, restriction, multigrid 2289 2290 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2291 @*/ 2292 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2293 { 2294 PetscErrorCode ierr; 2295 2296 PetscFunctionBegin; 2297 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2298 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2299 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2300 PetscFunctionReturn(0); 2301 } 2302 2303 #undef __FUNCT__ 2304 #define __FUNCT__ "DMSetApplicationContextDestroy" 2305 /*@C 2306 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2307 2308 Not Collective 2309 2310 Input Parameters: 2311 + dm - the DM object 2312 - destroy - the destroy function 2313 2314 Level: intermediate 2315 2316 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2317 2318 @*/ 2319 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2320 { 2321 PetscFunctionBegin; 2322 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2323 dm->ctxdestroy = destroy; 2324 PetscFunctionReturn(0); 2325 } 2326 2327 #undef __FUNCT__ 2328 #define __FUNCT__ "DMSetApplicationContext" 2329 /*@ 2330 DMSetApplicationContext - Set a user context into a DM object 2331 2332 Not Collective 2333 2334 Input Parameters: 2335 + dm - the DM object 2336 - ctx - the user context 2337 2338 Level: intermediate 2339 2340 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2341 2342 @*/ 2343 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2344 { 2345 PetscFunctionBegin; 2346 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2347 dm->ctx = ctx; 2348 PetscFunctionReturn(0); 2349 } 2350 
2351 #undef __FUNCT__ 2352 #define __FUNCT__ "DMGetApplicationContext" 2353 /*@ 2354 DMGetApplicationContext - Gets a user context from a DM object 2355 2356 Not Collective 2357 2358 Input Parameter: 2359 . dm - the DM object 2360 2361 Output Parameter: 2362 . ctx - the user context 2363 2364 Level: intermediate 2365 2366 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2367 2368 @*/ 2369 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2370 { 2371 PetscFunctionBegin; 2372 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2373 *(void**)ctx = dm->ctx; 2374 PetscFunctionReturn(0); 2375 } 2376 2377 #undef __FUNCT__ 2378 #define __FUNCT__ "DMSetVariableBounds" 2379 /*@C 2380 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2381 2382 Logically Collective on DM 2383 2384 Input Parameter: 2385 + dm - the DM object 2386 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2387 2388 Level: intermediate 2389 2390 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2391 DMSetJacobian() 2392 2393 @*/ 2394 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2395 { 2396 PetscFunctionBegin; 2397 dm->ops->computevariablebounds = f; 2398 PetscFunctionReturn(0); 2399 } 2400 2401 #undef __FUNCT__ 2402 #define __FUNCT__ "DMHasVariableBounds" 2403 /*@ 2404 DMHasVariableBounds - does the DM object have a variable bounds function? 2405 2406 Not Collective 2407 2408 Input Parameter: 2409 . dm - the DM object to destroy 2410 2411 Output Parameter: 2412 . 
flg - PETSC_TRUE if the variable bounds function exists 2413 2414 Level: developer 2415 2416 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2417 2418 @*/ 2419 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2420 { 2421 PetscFunctionBegin; 2422 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2423 PetscFunctionReturn(0); 2424 } 2425 2426 #undef __FUNCT__ 2427 #define __FUNCT__ "DMComputeVariableBounds" 2428 /*@C 2429 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2430 2431 Logically Collective on DM 2432 2433 Input Parameters: 2434 + dm - the DM object to destroy 2435 - x - current solution at which the bounds are computed 2436 2437 Output parameters: 2438 + xl - lower bound 2439 - xu - upper bound 2440 2441 Level: intermediate 2442 2443 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2444 2445 @*/ 2446 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2447 { 2448 PetscErrorCode ierr; 2449 2450 PetscFunctionBegin; 2451 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2452 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2453 if (dm->ops->computevariablebounds) { 2454 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2455 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2456 PetscFunctionReturn(0); 2457 } 2458 2459 #undef __FUNCT__ 2460 #define __FUNCT__ "DMHasColoring" 2461 /*@ 2462 DMHasColoring - does the DM object have a method of providing a coloring? 2463 2464 Not Collective 2465 2466 Input Parameter: 2467 . dm - the DM object 2468 2469 Output Parameter: 2470 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2471 2472 Level: developer 2473 2474 .seealso DMHasFunction(), DMCreateColoring() 2475 2476 @*/ 2477 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2478 { 2479 PetscFunctionBegin; 2480 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2481 PetscFunctionReturn(0); 2482 } 2483 2484 #undef __FUNCT__ 2485 #define __FUNCT__ "DMSetVec" 2486 /*@C 2487 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2488 2489 Collective on DM 2490 2491 Input Parameter: 2492 + dm - the DM object 2493 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2494 2495 Level: developer 2496 2497 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2498 2499 @*/ 2500 PetscErrorCode DMSetVec(DM dm,Vec x) 2501 { 2502 PetscErrorCode ierr; 2503 2504 PetscFunctionBegin; 2505 if (x) { 2506 if (!dm->x) { 2507 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2508 } 2509 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2510 } else if (dm->x) { 2511 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2512 } 2513 PetscFunctionReturn(0); 2514 } 2515 2516 PetscFunctionList DMList = NULL; 2517 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2518 2519 #undef __FUNCT__ 2520 #define __FUNCT__ "DMSetType" 2521 /*@C 2522 DMSetType - Builds a DM, for a particular DM implementation. 2523 2524 Collective on DM 2525 2526 Input Parameters: 2527 + dm - The DM object 2528 - method - The name of the DM type 2529 2530 Options Database Key: 2531 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2532 2533 Notes: 2534 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already of the requested type: nothing to do */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the previous implementation's data before installing the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll();CHKERRQ(ierr);
  }
  /* Returns a borrowed reference to the type name string; the caller must not free it */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below; a same-type
     conversion currently still goes through the named-converter lookup — confirm intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Composed function name has the form DMConvert_<from>_<to>_C */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    /* B is a probe object used only to query the target class for a composed converter */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with the #if 0 block disabled, B from step 2 is not destroyed on this
       error path — confirm whether the leak before the error abort matters */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

  foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister -  Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -da_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().
  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscBool      isbinary, ishdf5;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt classid;
    char     type[256];

    /* A binary DM record is: classid, type-name string, then type-specific data */
    ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
    if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
    ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
    ierr = DMSetType(newdm, type);CHKERRQ(ierr);
    if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);}
  } else if (ishdf5) {
    /* HDF5 path delegates directly to the type-specific loader */
    if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);}
  } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()");
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellVector"
/* Debug helper: print the len entries of an element vector for cell c to stdout (rank-local) */
PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < len; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellMatrix"
/* Debug helper: print a rows x cols element matrix (row-major storage) for cell c to stdout (rank-local) */
PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
{
  PetscInt       f, g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < rows; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
    for (g = 0; g < cols; ++g) {
      ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintLocalVec"
/* Debug helper: print a local vector rank by rank, zeroing entries smaller than tol */
PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
{
  PetscMPIInt    rank, numProcs;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
  /* Serialize output across ranks: each rank prints its copy in turn */
  for (p = 0; p < numProcs; ++p) {
    if (p == rank) {
      Vec x;

      /* Work on a copy so chopping small entries does not modify the caller's vector */
      ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
      ierr = VecCopy(X, x);CHKERRQ(ierr);
      ierr = VecChop(x, tol);CHKERRQ(ierr);
      ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
    ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Build the section lazily if the implementation knows how; may still return NULL */
  if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);}
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference the incoming section before destroying the old one, so passing
     the currently-set section back in is safe even at refcount 1 */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field structure onto the DM and propagate field names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    PetscSection s;

    /* The global section is derived from the local section plus the point SF;
       both must exist before it can be built */
    ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr);
    if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
    if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the cached value layout to match the new global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection, or NULL

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* NOTE(review): PetscObjectReference() is invoked even when section is NULL —
     presumably it tolerates a NULL argument; confirm against its implementation */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3012 3013 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3014 @*/ 3015 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 3016 { 3017 PetscInt nroots; 3018 PetscErrorCode ierr; 3019 3020 PetscFunctionBegin; 3021 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3022 PetscValidPointer(sf, 2); 3023 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 3024 if (nroots < 0) { 3025 PetscSection section, gSection; 3026 3027 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3028 if (section) { 3029 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3030 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3031 } else { 3032 *sf = NULL; 3033 PetscFunctionReturn(0); 3034 } 3035 } 3036 *sf = dm->defaultSF; 3037 PetscFunctionReturn(0); 3038 } 3039 3040 #undef __FUNCT__ 3041 #define __FUNCT__ "DMSetDefaultSF" 3042 /*@ 3043 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3044 3045 Input Parameters: 3046 + dm - The DM 3047 - sf - The PetscSF 3048 3049 Level: intermediate 3050 3051 Note: Any previous SF is destroyed 3052 3053 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3054 @*/ 3055 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3056 { 3057 PetscErrorCode ierr; 3058 3059 PetscFunctionBegin; 3060 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3061 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3062 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3063 dm->defaultSF = sf; 3064 PetscFunctionReturn(0); 3065 } 3066 3067 #undef __FUNCT__ 3068 #define __FUNCT__ "DMCreateDefaultSF" 3069 /*@C 3070 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3071 describing the data layout. 3072 3073 Input Parameters: 3074 + dm - The DM 3075 . 
localSection - PetscSection describing the local data layout 3076 - globalSection - PetscSection describing the global data layout 3077 3078 Level: intermediate 3079 3080 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3081 @*/ 3082 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3083 { 3084 MPI_Comm comm; 3085 PetscLayout layout; 3086 const PetscInt *ranges; 3087 PetscInt *local; 3088 PetscSFNode *remote; 3089 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3090 PetscMPIInt size, rank; 3091 PetscErrorCode ierr; 3092 3093 PetscFunctionBegin; 3094 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3095 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3096 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3097 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3098 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3099 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3100 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3101 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3102 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3103 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3104 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3105 for (p = pStart; p < pEnd; ++p) { 3106 PetscInt gdof, gcdof; 3107 3108 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3109 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3110 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3111 } 3112 ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr); 3113 ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr); 3114 for (p = pStart, l = 0; p < pEnd; ++p) { 3115 const PetscInt *cind; 3116 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3117 3118 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3119 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3120 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3121 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3122 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3123 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3124 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3125 if (!gdof) continue; /* Censored point */ 3126 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3127 if (gsize != dof-cdof) { 3128 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3129 cdof = 0; /* Ignore constraints */ 3130 } 3131 for (d = 0, c = 0; d < dof; ++d) { 3132 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3133 local[l+d-c] = off+d; 3134 } 3135 if (gdof < 0) { 3136 for (d = 0; d < gsize; ++d, ++l) { 3137 PetscInt offset = -(goff+1) + d, r; 3138 3139 ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr); 3140 if (r < 0) r = -(r+2); 3141 if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff); 3142 remote[l].rank = r; 3143 remote[l].index = offset - ranges[r]; 3144 } 3145 } else { 3146 for (d = 0; d < gsize; ++d, ++l) { 3147 remote[l].rank = rank; 3148 remote[l].index = goff+d - ranges[rank]; 3149 } 3150 } 3151 } 3152 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3153 
ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3154 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3155 PetscFunctionReturn(0); 3156 } 3157 3158 #undef __FUNCT__ 3159 #define __FUNCT__ "DMGetPointSF" 3160 /*@ 3161 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3162 3163 Input Parameter: 3164 . dm - The DM 3165 3166 Output Parameter: 3167 . sf - The PetscSF 3168 3169 Level: intermediate 3170 3171 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3172 3173 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3174 @*/ 3175 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3176 { 3177 PetscFunctionBegin; 3178 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3179 PetscValidPointer(sf, 2); 3180 *sf = dm->sf; 3181 PetscFunctionReturn(0); 3182 } 3183 3184 #undef __FUNCT__ 3185 #define __FUNCT__ "DMSetPointSF" 3186 /*@ 3187 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 
3188 3189 Input Parameters: 3190 + dm - The DM 3191 - sf - The PetscSF 3192 3193 Level: intermediate 3194 3195 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3196 @*/ 3197 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3198 { 3199 PetscErrorCode ierr; 3200 3201 PetscFunctionBegin; 3202 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3203 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3204 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3205 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3206 dm->sf = sf; 3207 PetscFunctionReturn(0); 3208 } 3209 3210 #undef __FUNCT__ 3211 #define __FUNCT__ "DMGetNumFields" 3212 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3213 { 3214 PetscFunctionBegin; 3215 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3216 PetscValidPointer(numFields, 2); 3217 *numFields = dm->numFields; 3218 PetscFunctionReturn(0); 3219 } 3220 3221 #undef __FUNCT__ 3222 #define __FUNCT__ "DMSetNumFields" 3223 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3224 { 3225 PetscInt f; 3226 PetscErrorCode ierr; 3227 3228 PetscFunctionBegin; 3229 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3230 if (dm->numFields == numFields) PetscFunctionReturn(0); 3231 for (f = 0; f < dm->numFields; ++f) { 3232 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3233 } 3234 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3235 dm->numFields = numFields; 3236 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3237 for (f = 0; f < dm->numFields; ++f) { 3238 ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3239 } 3240 PetscFunctionReturn(0); 3241 } 3242 3243 #undef __FUNCT__ 3244 #define __FUNCT__ "DMGetField" 3245 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3246 { 3247 PetscFunctionBegin; 3248 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3249 PetscValidPointer(field, 3); 3250 if (!dm->fields) 
SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3251 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3252 *field = (PetscObject) dm->fields[f]; 3253 PetscFunctionReturn(0); 3254 } 3255 3256 #undef __FUNCT__ 3257 #define __FUNCT__ "DMSetField" 3258 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field) 3259 { 3260 PetscErrorCode ierr; 3261 3262 PetscFunctionBegin; 3263 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3264 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3265 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3266 if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0); 3267 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3268 dm->fields[f] = (PetscFE) field; 3269 ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr); 3270 PetscFunctionReturn(0); 3271 } 3272 3273 #undef __FUNCT__ 3274 #define __FUNCT__ "DMRestrictHook_Coordinates" 3275 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3276 { 3277 DM dm_coord,dmc_coord; 3278 PetscErrorCode ierr; 3279 Vec coords,ccoords; 3280 VecScatter scat; 3281 PetscFunctionBegin; 3282 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3283 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3284 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3285 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3286 if (coords && !ccoords) { 3287 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3288 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3289 ierr = 
VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates takes its own reference, so the local ccoords can be destroyed */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: scatter global and local coordinates from dm onto subdm when it has none. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM             dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec            coords,ccoords,clcoords;
  VecScatter     *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* scat_i fills the subdomain's global vector, scat_g its (ghosted) local vector */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so re-setting the current vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; rebuilt on demand by DMGetCoordinatesLocal() */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Ensure coordinates follow the DM through coarsening and domain decomposition */
  ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.
  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so re-setting the current vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; rebuilt on demand by DMGetCoordinates() */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    /* Lazily rebuild the global vector from the cached local coordinates */
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    /* Lazily rebuild the (ghosted) local vector from the cached global coordinates */
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
.
cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    /* Lazily create the coordinate DM via the type-specific implementation */
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinateDM"
/*@
  DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
  /* NOTE(review): the old coordinateDM is destroyed BEFORE cdm is referenced;
     passing cdm == dm->coordinateDM at refcount 1 would be unsafe — confirm callers */
  ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
  dm->coordinateDM = cdm;
  ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateSection"
/*@
  DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.

  Not Collective

  Input Parameter:
.
dm - The DM object 3566 3567 Output Parameter: 3568 . section - The PetscSection object 3569 3570 Level: intermediate 3571 3572 .keywords: mesh, coordinates 3573 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3574 @*/ 3575 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3576 { 3577 DM cdm; 3578 PetscErrorCode ierr; 3579 3580 PetscFunctionBegin; 3581 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3582 PetscValidPointer(section, 2); 3583 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3584 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3585 PetscFunctionReturn(0); 3586 } 3587 3588 #undef __FUNCT__ 3589 #define __FUNCT__ "DMSetCoordinateSection" 3590 /*@ 3591 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3592 3593 Not Collective 3594 3595 Input Parameters: 3596 + dm - The DM object 3597 - section - The PetscSection object 3598 3599 Level: intermediate 3600 3601 .keywords: mesh, coordinates 3602 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3603 @*/ 3604 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3605 { 3606 DM cdm; 3607 PetscErrorCode ierr; 3608 3609 PetscFunctionBegin; 3610 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3611 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3612 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3613 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3614 PetscFunctionReturn(0); 3615 } 3616 3617 #undef __FUNCT__ 3618 #define __FUNCT__ "DMGetPeriodicity" 3619 PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal **maxCell, const PetscReal **L) 3620 { 3621 PetscFunctionBegin; 3622 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3623 if (L) *L = dm->L; 3624 if (maxCell) *maxCell = dm->maxCell; 3625 PetscFunctionReturn(0); 3626 } 3627 3628 #undef __FUNCT__ 3629 #define __FUNCT__ "DMSetPeriodicity" 3630 PetscErrorCode DMSetPeriodicity(DM dm, const PetscReal maxCell[], const PetscReal L[]) 
3631 { 3632 Vec coordinates; 3633 PetscInt dim, d; 3634 PetscErrorCode ierr; 3635 3636 PetscFunctionBegin; 3637 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3638 ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr); 3639 ierr = VecGetBlockSize(coordinates, &dim);CHKERRQ(ierr); 3640 ierr = PetscMalloc1(dim,&dm->L);CHKERRQ(ierr); 3641 ierr = PetscMalloc1(dim,&dm->maxCell);CHKERRQ(ierr); 3642 for (d = 0; d < dim; ++d) {dm->L[d] = L[d]; dm->maxCell[d] = maxCell[d];} 3643 PetscFunctionReturn(0); 3644 } 3645 3646 #undef __FUNCT__ 3647 #define __FUNCT__ "DMLocatePoints" 3648 /*@ 3649 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3650 3651 Not collective 3652 3653 Input Parameters: 3654 + dm - The DM 3655 - v - The Vec of points 3656 3657 Output Parameter: 3658 . cells - The local cell numbers for cells which contain the points 3659 3660 Level: developer 3661 3662 .keywords: point location, mesh 3663 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3664 @*/ 3665 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3666 { 3667 PetscErrorCode ierr; 3668 3669 PetscFunctionBegin; 3670 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3671 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3672 PetscValidPointer(cells,3); 3673 if (dm->ops->locatepoints) { 3674 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3675 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3676 PetscFunctionReturn(0); 3677 } 3678 3679 #undef __FUNCT__ 3680 #define __FUNCT__ "DMGetOutputDM" 3681 /*@ 3682 DMGetOutputDM - Retrieve the DM associated with the layout for output 3683 3684 Input Parameter: 3685 . dm - The original DM 3686 3687 Output Parameter: 3688 . 
odm - The DM which provides the layout for output 3689 3690 Level: intermediate 3691 3692 .seealso: VecView(), DMGetDefaultGlobalSection() 3693 @*/ 3694 PetscErrorCode DMGetOutputDM(DM dm, DM *odm) 3695 { 3696 PetscSection section; 3697 PetscBool hasConstraints; 3698 PetscErrorCode ierr; 3699 3700 PetscFunctionBegin; 3701 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3702 PetscValidPointer(odm,2); 3703 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3704 ierr = PetscSectionHasConstraints(section, &hasConstraints);CHKERRQ(ierr); 3705 if (!hasConstraints) { 3706 *odm = dm; 3707 PetscFunctionReturn(0); 3708 } 3709 if (!dm->dmBC) { 3710 PetscSection newSection, gsection; 3711 PetscSF sf; 3712 3713 ierr = DMClone(dm, &dm->dmBC);CHKERRQ(ierr); 3714 ierr = PetscSectionClone(section, &newSection);CHKERRQ(ierr); 3715 ierr = DMSetDefaultSection(dm->dmBC, newSection);CHKERRQ(ierr); 3716 ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr); 3717 ierr = DMGetPointSF(dm->dmBC, &sf);CHKERRQ(ierr); 3718 ierr = PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);CHKERRQ(ierr); 3719 ierr = DMSetDefaultGlobalSection(dm->dmBC, gsection);CHKERRQ(ierr); 3720 ierr = PetscSectionDestroy(&gsection);CHKERRQ(ierr); 3721 } 3722 *odm = dm->dmBC; 3723 PetscFunctionReturn(0); 3724 } 3725 3726 #undef __FUNCT__ 3727 #define __FUNCT__ "DMGetOutputSequenceNumber" 3728 /*@ 3729 DMGetOutputSequenceNumber - Retrieve the sequence number for output 3730 3731 Input Parameter: 3732 . dm - The original DM 3733 3734 Output Parameter: 3735 . num - The output sequence number 3736 3737 Level: intermediate 3738 3739 Note: This is intended for output that should appear in sequence, for instance 3740 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 
3741 3742 .seealso: VecView() 3743 @*/ 3744 PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num) 3745 { 3746 PetscFunctionBegin; 3747 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3748 PetscValidPointer(num,2); 3749 *num = dm->outputSequenceNum; 3750 PetscFunctionReturn(0); 3751 } 3752 3753 #undef __FUNCT__ 3754 #define __FUNCT__ "DMSetOutputSequenceNumber" 3755 /*@ 3756 DMSetOutputSequenceNumber - Set the sequence number for output 3757 3758 Input Parameters: 3759 + dm - The original DM 3760 - num - The output sequence number 3761 3762 Level: intermediate 3763 3764 Note: This is intended for output that should appear in sequence, for instance 3765 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 3766 3767 .seealso: VecView() 3768 @*/ 3769 PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num) 3770 { 3771 PetscFunctionBegin; 3772 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3773 dm->outputSequenceNum = num; 3774 PetscFunctionReturn(0); 3775 } 3776