#include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
#include <petscsf.h>

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Register all packages the DM depends on before creating the header */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* All function-table slots start NULL; they are filled in by DMSetType() */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = NULL;
  v->ltogmapb     = NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  v->L                    = NULL;
  v->maxCell              = NULL;
  {
    PetscInt i;
    /* 10 presumably matches the fixed size of the nullspaceConstructors array
       in dmimpl.h -- TODO confirm; see also the `field >= 10` check in
       DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields         = 0;
  v->fields            = NULL;
  v->dmBC              = NULL;
  v->outputSequenceNum = -1;
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMClone"
/*@
  DMClone - Creates a DM object with the same topology as the original.

  Collective on MPI_Comm

  Input Parameter:
. dm - The original DM object

  Output Parameter:
. newdm - The new DM object

  Level: beginner

.keywords: DM, topology, create
@*/
PetscErrorCode DMClone(DM dm, DM *newdm)
{
  PetscSF        sf;
  Vec            coords;
  void          *ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(newdm,2);
  ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
  /* The type-specific clone copies the topology; not all DM types provide one */
  if (dm->ops->clone) {
    ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
  }
  /* Clone shares topology with a set-up DM, so mark the copy as set up too */
  (*newdm)->setupcalled = PETSC_TRUE;
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
  ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
  /* Prefer local coordinates (include ghosts); fall back to global */
  ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
  if (coords) {
    ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
  } else {
    ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
    if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
  }
  /* maxCell non-NULL signals a periodic domain; copy the periodicity data */
  if (dm->maxCell) {
    const PetscReal *maxCell, *L;
    ierr = DMGetPeriodicity(dm, &maxCell, &L);CHKERRQ(ierr);
    ierr = DMSetPeriodicity(*newdm, maxCell, L);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameter:
+  da - initial distributed array
.  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
@*/
PetscErrorCode DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Replace any previously stored type string with a fresh copy */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetVecType"
/*@C
       DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  ctype - the vector type

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode DMGetVecType(DM da,VecType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Returns a borrowed pointer to the DM's type string; caller must not free it */
  *ctype = da->vectype;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
   VecGetDM - Gets the DM defining the data layout of the vector

   Not collective

   Input Parameter:
.  v - The Vec

   Output Parameter:
.  dm - The DM

   Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Vec as a composed object; NULL if never set */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
   VecSetDM - Sets the DM defining the data layout of the vector

   Not collective

   Input Parameters:
+  v - The Vec
-  dm - The DM

   Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Composing takes a reference on dm (or removes it when dm is NULL) */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameter:
+  dm - the DM context
.  ctype - the matrix type

   Options Database:
.
-dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
@*/
PetscErrorCode DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Replace any previously stored type string with a fresh copy */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetMatType"
/*@C
       DMGetMatType - Gets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameter:
.  dm - the DM context

   Output Parameter:
.  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
@*/
PetscErrorCode DMGetMatType(DM dm,MatType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Returns a borrowed pointer to the DM's type string; caller must not free it */
  *ctype = dm->mattype;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
   MatGetDM - Gets the DM defining the data layout of the matrix

   Not collective

   Input Parameter:
.  A - The Mat

   Output Parameter:
.  dm - The DM

   Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Mat as a composed object; NULL if never set */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
   MatSetDM - Sets the DM defining the data layout of the matrix

   Not collective

   Input Parameters:
+  A - The Mat
-  dm - The DM

   Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Composing takes a reference on dm (or removes it when dm is NULL) */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameter:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only truly destroy once the remaining references are exactly the circular ones counted above */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0;
PetscFunctionReturn(0);} 419 /* 420 Need this test because the dm references the vectors that 421 reference the dm, so destroying the dm calls destroy on the 422 vectors that cause another destroy on the dm 423 */ 424 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 425 ((PetscObject) (*dm))->refct = 0; 426 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 427 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 428 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 429 } 430 nnext=(*dm)->namedglobal; 431 (*dm)->namedglobal = NULL; 432 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */ 433 nnext = nlink->next; 434 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 435 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 436 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 437 ierr = PetscFree(nlink);CHKERRQ(ierr); 438 } 439 nnext=(*dm)->namedlocal; 440 (*dm)->namedlocal = NULL; 441 for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */ 442 nnext = nlink->next; 443 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 444 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 445 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 446 ierr = PetscFree(nlink);CHKERRQ(ierr); 447 } 448 449 /* Destroy the list of hooks */ 450 { 451 DMCoarsenHookLink link,next; 452 for (link=(*dm)->coarsenhook; link; link=next) { 453 next = link->next; 454 ierr = PetscFree(link);CHKERRQ(ierr); 455 } 456 (*dm)->coarsenhook = NULL; 457 } 458 { 459 DMRefineHookLink link,next; 460 for (link=(*dm)->refinehook; link; link=next) { 461 next = link->next; 462 ierr = PetscFree(link);CHKERRQ(ierr); 463 } 464 (*dm)->refinehook = NULL; 465 } 466 { 467 DMSubDomainHookLink link,next; 468 for 
(link=(*dm)->subdomainhook; link; link=next) { 469 next = link->next; 470 ierr = PetscFree(link);CHKERRQ(ierr); 471 } 472 (*dm)->subdomainhook = NULL; 473 } 474 { 475 DMGlobalToLocalHookLink link,next; 476 for (link=(*dm)->gtolhook; link; link=next) { 477 next = link->next; 478 ierr = PetscFree(link);CHKERRQ(ierr); 479 } 480 (*dm)->gtolhook = NULL; 481 } 482 /* Destroy the work arrays */ 483 { 484 DMWorkLink link,next; 485 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 486 for (link=(*dm)->workin; link; link=next) { 487 next = link->next; 488 ierr = PetscFree(link->mem);CHKERRQ(ierr); 489 ierr = PetscFree(link);CHKERRQ(ierr); 490 } 491 (*dm)->workin = NULL; 492 } 493 494 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 495 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 496 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 497 498 if ((*dm)->ctx && (*dm)->ctxdestroy) { 499 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 500 } 501 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 502 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 503 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 504 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 505 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 506 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 507 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 508 509 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 510 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 511 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 512 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 513 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 514 515 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 516 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 517 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 518 ierr = PetscFree((*dm)->maxCell);CHKERRQ(ierr); 519 ierr 
= PetscFree((*dm)->L);CHKERRQ(ierr); 520 521 for (f = 0; f < (*dm)->numFields; ++f) { 522 ierr = PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr); 523 } 524 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 525 ierr = DMDestroy(&(*dm)->dmBC);CHKERRQ(ierr); 526 /* if memory was published with SAWs then destroy it */ 527 ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr); 528 529 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 530 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 531 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 532 PetscFunctionReturn(0); 533 } 534 535 #undef __FUNCT__ 536 #define __FUNCT__ "DMSetUp" 537 /*@ 538 DMSetUp - sets up the data structures inside a DM object 539 540 Collective on DM 541 542 Input Parameter: 543 . dm - the DM object to setup 544 545 Level: developer 546 547 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 548 549 @*/ 550 PetscErrorCode DMSetUp(DM dm) 551 { 552 PetscErrorCode ierr; 553 554 PetscFunctionBegin; 555 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 556 if (dm->setupcalled) PetscFunctionReturn(0); 557 if (dm->ops->setup) { 558 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 559 } 560 dm->setupcalled = PETSC_TRUE; 561 PetscFunctionReturn(0); 562 } 563 564 #undef __FUNCT__ 565 #define __FUNCT__ "DMSetFromOptions" 566 /*@ 567 DMSetFromOptions - sets parameters in a DM from the options database 568 569 Collective on DM 570 571 Input Parameter: 572 . dm - the DM object to set options for 573 574 Options Database: 575 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 576 . -dm_vec_type <type> type of vector to create inside DM 577 . 
-dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMSetFromOptions(DM dm)
{
  char           typeName[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  /* NOTE(review): the docstring advertises -dm_coloring_type but the code
     registers -dm_is_coloring_type -- confirm which name is intended */
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameter:
+   dm - the DM object to view
-   v - the viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Default to stdout on the DM's communicator when no viewer is supplied */
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: classid followed by the fixed-width type name */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Lazily build and cache the mapping on first request */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the mapping from the default (local) and global sections */
      PetscInt *ltog;
      PetscInt  pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc1(size, &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0;
c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of ltog */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No section: defer to the DM implementation, if it provides one */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Lazily build and cache the blocked mapping on first request */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the blocked mapping is just the pointwise mapping; share it */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameter:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@
    DMCreateColoring - Gets coloring for a DM

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL

    Output Parameter:
.
coloring - the coloring 925 926 Level: developer 927 928 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType() 929 930 @*/ 931 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring) 932 { 933 PetscErrorCode ierr; 934 935 PetscFunctionBegin; 936 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 937 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 938 ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr); 939 PetscFunctionReturn(0); 940 } 941 942 #undef __FUNCT__ 943 #define __FUNCT__ "DMCreateMatrix" 944 /*@ 945 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 946 947 Collective on DM 948 949 Input Parameter: 950 . dm - the DM object 951 952 Output Parameter: 953 . mat - the empty Jacobian 954 955 Level: beginner 956 957 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 958 do not need to do it yourself. 959 960 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 961 the nonzero pattern call DMDASetMatPreallocateOnly() 962 963 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 964 internally by PETSc. 965 966 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 967 the indices for the global numbering for DMDAs which is complicated. 
968 969 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType() 970 971 @*/ 972 PetscErrorCode DMCreateMatrix(DM dm,Mat *mat) 973 { 974 PetscErrorCode ierr; 975 976 PetscFunctionBegin; 977 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 978 ierr = MatInitializePackage();CHKERRQ(ierr); 979 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 980 PetscValidPointer(mat,3); 981 ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr); 982 PetscFunctionReturn(0); 983 } 984 985 #undef __FUNCT__ 986 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 987 /*@ 988 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 989 preallocated but the nonzero structure and zero values will not be set. 990 991 Logically Collective on DMDA 992 993 Input Parameter: 994 + dm - the DM 995 - only - PETSC_TRUE if only want preallocation 996 997 Level: developer 998 .seealso DMCreateMatrix() 999 @*/ 1000 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 1001 { 1002 PetscFunctionBegin; 1003 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1004 dm->prealloc_only = only; 1005 PetscFunctionReturn(0); 1006 } 1007 1008 #undef __FUNCT__ 1009 #define __FUNCT__ "DMGetWorkArray" 1010 /*@C 1011 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1012 1013 Not Collective 1014 1015 Input Parameters: 1016 + dm - the DM object 1017 . count - The minium size 1018 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1019 1020 Output Parameter: 1021 . 
array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse an available link from the free list, or allocate a new one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the link's buffer if the request exceeds its current capacity */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the link owning this buffer on the checked-out list and move it back
     to the free list; the caller's pointer is nulled to prevent reuse */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetNullSpaceConstructor"
/* Registers a callback that builds the null space for the given field when
   requested (e.g. during field splitting). `field` must be < 10, presumably the
   fixed size of the nullspaceConstructors array initialized in DMCreate() --
   TODO confirm against dmimpl.h. */
PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
  dm->nullspaceConstructors[field] = nullsp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the ones that were requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the locally owned (gdof > 0), unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: fill the index arrays */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr =
PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1181 if (gdof > 0) { 1182 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1183 for (f = 0; f < nF; ++f) { 1184 PetscInt fdof, fcdof, fc; 1185 1186 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1187 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1188 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1189 fieldIndices[f][fieldSizes[f]] = goff++; 1190 } 1191 } 1192 } 1193 } 1194 if (numFields) *numFields = nF; 1195 if (fieldNames) { 1196 ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr); 1197 for (f = 0; f < nF; ++f) { 1198 const char *fieldName; 1199 1200 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1201 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1202 } 1203 } 1204 if (fields) { 1205 ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr); 1206 for (f = 0; f < nF; ++f) { 1207 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1208 } 1209 } 1210 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1211 } else if (dm->ops->createfieldis) { 1212 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1213 } 1214 PetscFunctionReturn(0); 1215 } 1216 1217 1218 #undef __FUNCT__ 1219 #define __FUNCT__ "DMCreateFieldDecomposition" 1220 /*@C 1221 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1222 corresponding to different fields: each IS contains the global indices of the dofs of the 1223 corresponding field. The optional list of DMs define the DM for each subproblem. 1224 Generalizes DMCreateFieldIS(). 1225 1226 Not collective 1227 1228 Input Parameter: 1229 . dm - the DM object 1230 1231 Output Parameters: 1232 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1233 . 
namelist - The name for each field (or NULL if not requested) 1234 . islist - The global indices for each field (or NULL if not requested) 1235 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1236 1237 Level: intermediate 1238 1239 Notes: 1240 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1241 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1242 and all of the arrays should be freed with PetscFree(). 1243 1244 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1245 @*/ 1246 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1247 { 1248 PetscErrorCode ierr; 1249 1250 PetscFunctionBegin; 1251 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1252 if (len) { 1253 PetscValidPointer(len,2); 1254 *len = 0; 1255 } 1256 if (namelist) { 1257 PetscValidPointer(namelist,3); 1258 *namelist = 0; 1259 } 1260 if (islist) { 1261 PetscValidPointer(islist,4); 1262 *islist = 0; 1263 } 1264 if (dmlist) { 1265 PetscValidPointer(dmlist,5); 1266 *dmlist = 0; 1267 } 1268 /* 1269 Is it a good idea to apply the following check across all impls? 1270 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1271 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1272 */ 1273 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1274 if (!dm->ops->createfielddecomposition) { 1275 PetscSection section; 1276 PetscInt numFields, f; 1277 1278 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1279 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1280 if (section && numFields && dm->ops->createsubdm) { 1281 *len = numFields; 1282 if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);} 1283 if (islist) {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);} 1284 if (dmlist) {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);} 1285 for (f = 0; f < numFields; ++f) { 1286 const char *fieldName; 1287 1288 ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr); 1289 if (namelist) { 1290 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1291 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1292 } 1293 } 1294 } else { 1295 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1296 /* By default there are no DMs associated with subproblems. */ 1297 if (dmlist) *dmlist = NULL; 1298 } 1299 } else { 1300 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1301 } 1302 PetscFunctionReturn(0); 1303 } 1304 1305 #undef __FUNCT__ 1306 #define __FUNCT__ "DMCreateSubDM" 1307 /*@C 1308 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1309 The fields are defined by DMCreateFieldIS(). 1310 1311 Not collective 1312 1313 Input Parameters: 1314 + dm - the DM object 1315 . numFields - number of fields in this subproblem 1316 - len - The number of subproblems in the decomposition (or NULL if not requested) 1317 1318 Output Parameters: 1319 . 
is - The global indices for the subproblem 1320 - dm - The DM for the subproblem 1321 1322 Level: intermediate 1323 1324 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1325 @*/ 1326 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1327 { 1328 PetscErrorCode ierr; 1329 1330 PetscFunctionBegin; 1331 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1332 PetscValidPointer(fields,3); 1333 if (is) PetscValidPointer(is,4); 1334 if (subdm) PetscValidPointer(subdm,5); 1335 if (dm->ops->createsubdm) { 1336 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1337 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1338 PetscFunctionReturn(0); 1339 } 1340 1341 1342 #undef __FUNCT__ 1343 #define __FUNCT__ "DMCreateDomainDecomposition" 1344 /*@C 1345 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1346 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1347 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1348 define a nonoverlapping covering, while outer subdomains can overlap. 1349 The optional list of DMs define the DM for each subproblem. 1350 1351 Not collective 1352 1353 Input Parameter: 1354 . dm - the DM object 1355 1356 Output Parameters: 1357 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1358 . namelist - The name for each subdomain (or NULL if not requested) 1359 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1360 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1361 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1362 1363 Level: intermediate 1364 1365 Notes: 1366 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1367 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1368 and all of the arrays should be freed with PetscFree(). 1369 1370 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1371 @*/ 1372 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1373 { 1374 PetscErrorCode ierr; 1375 DMSubDomainHookLink link; 1376 PetscInt i,l; 1377 1378 PetscFunctionBegin; 1379 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1380 if (len) {PetscValidPointer(len,2); *len = 0;} 1381 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1382 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1383 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1384 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1385 /* 1386 Is it a good idea to apply the following check across all impls? 1387 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1388 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1389 */ 1390 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1391 if (dm->ops->createdomaindecomposition) { 1392 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1393 /* copy subdomain hooks and context over to the subdomain DMs */ 1394 if (dmlist) { 1395 for (i = 0; i < l; i++) { 1396 for (link=dm->subdomainhook; link; link=link->next) { 1397 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1398 } 1399 (*dmlist)[i]->ctx = dm->ctx; 1400 } 1401 } 1402 if (len) *len = l; 1403 } 1404 PetscFunctionReturn(0); 1405 } 1406 1407 1408 #undef __FUNCT__ 1409 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1410 /*@C 1411 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1412 1413 Not collective 1414 1415 Input Parameters: 1416 + dm - the DM object 1417 . n - the number of subdomain scatters 1418 - subdms - the local subdomains 1419 1420 Output Parameters: 1421 + n - the number of scatters returned 1422 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1423 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1424 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1425 1426 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1427 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1428 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1429 solution and residual data. 
1430 1431 Level: developer 1432 1433 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1434 @*/ 1435 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1436 { 1437 PetscErrorCode ierr; 1438 1439 PetscFunctionBegin; 1440 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1441 PetscValidPointer(subdms,3); 1442 if (dm->ops->createddscatters) { 1443 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1444 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1445 PetscFunctionReturn(0); 1446 } 1447 1448 #undef __FUNCT__ 1449 #define __FUNCT__ "DMRefine" 1450 /*@ 1451 DMRefine - Refines a DM object 1452 1453 Collective on DM 1454 1455 Input Parameter: 1456 + dm - the DM object 1457 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1458 1459 Output Parameter: 1460 . 
dmf - the refined DM, or NULL 1461 1462 Note: If no refinement was done, the return value is NULL 1463 1464 Level: developer 1465 1466 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1467 @*/ 1468 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1469 { 1470 PetscErrorCode ierr; 1471 DMRefineHookLink link; 1472 1473 PetscFunctionBegin; 1474 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1475 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1476 if (*dmf) { 1477 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1478 1479 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1480 1481 (*dmf)->ctx = dm->ctx; 1482 (*dmf)->leveldown = dm->leveldown; 1483 (*dmf)->levelup = dm->levelup + 1; 1484 1485 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1486 for (link=dm->refinehook; link; link=link->next) { 1487 if (link->refinehook) { 1488 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1489 } 1490 } 1491 } 1492 PetscFunctionReturn(0); 1493 } 1494 1495 #undef __FUNCT__ 1496 #define __FUNCT__ "DMRefineHookAdd" 1497 /*@C 1498 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1499 1500 Logically Collective 1501 1502 Input Arguments: 1503 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1504 . refinehook - function to run when setting up a coarser level 1505 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1506 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1507 1508 Calling sequence of refinehook: 1509 $ refinehook(DM coarse,DM fine,void *ctx); 1510 1511 + coarse - coarse level DM 1512 . 
fine - fine level DM to interpolate problem to 1513 - ctx - optional user-defined function context 1514 1515 Calling sequence for interphook: 1516 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1517 1518 + coarse - coarse level DM 1519 . interp - matrix interpolating a coarse-level solution to the finer grid 1520 . fine - fine level DM to update 1521 - ctx - optional user-defined function context 1522 1523 Level: advanced 1524 1525 Notes: 1526 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1527 1528 If this function is called multiple times, the hooks will be run in the order they are added. 1529 1530 This function is currently not available from Fortran. 1531 1532 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1533 @*/ 1534 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1535 { 1536 PetscErrorCode ierr; 1537 DMRefineHookLink link,*p; 1538 1539 PetscFunctionBegin; 1540 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1541 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1542 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1543 link->refinehook = refinehook; 1544 link->interphook = interphook; 1545 link->ctx = ctx; 1546 link->next = NULL; 1547 *p = link; 1548 PetscFunctionReturn(0); 1549 } 1550 1551 #undef __FUNCT__ 1552 #define __FUNCT__ "DMInterpolate" 1553 /*@ 1554 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1555 1556 Collective if any hooks are 1557 1558 Input Arguments: 1559 + coarse - coarser DM to use as a base 1560 . 
restrct - interpolation matrix, apply using MatInterpolate() 1561 - fine - finer DM to update 1562 1563 Level: developer 1564 1565 .seealso: DMRefineHookAdd(), MatInterpolate() 1566 @*/ 1567 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1568 { 1569 PetscErrorCode ierr; 1570 DMRefineHookLink link; 1571 1572 PetscFunctionBegin; 1573 for (link=fine->refinehook; link; link=link->next) { 1574 if (link->interphook) { 1575 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1576 } 1577 } 1578 PetscFunctionReturn(0); 1579 } 1580 1581 #undef __FUNCT__ 1582 #define __FUNCT__ "DMGetRefineLevel" 1583 /*@ 1584 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1585 1586 Not Collective 1587 1588 Input Parameter: 1589 . dm - the DM object 1590 1591 Output Parameter: 1592 . level - number of refinements 1593 1594 Level: developer 1595 1596 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1597 1598 @*/ 1599 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1600 { 1601 PetscFunctionBegin; 1602 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1603 *level = dm->levelup; 1604 PetscFunctionReturn(0); 1605 } 1606 1607 #undef __FUNCT__ 1608 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1609 /*@C 1610 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1611 1612 Logically Collective 1613 1614 Input Arguments: 1615 + dm - the DM 1616 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1617 . endhook - function to run after DMGlobalToLocalEnd() has completed 1618 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1619 1620 Calling sequence for beginhook: 1621 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1622 1623 + dm - global DM 1624 . g - global vector 1625 . mode - mode 1626 . 
l - local vector 1627 - ctx - optional user-defined function context 1628 1629 1630 Calling sequence for endhook: 1631 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1632 1633 + global - global DM 1634 - ctx - optional user-defined function context 1635 1636 Level: advanced 1637 1638 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1639 @*/ 1640 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1641 { 1642 PetscErrorCode ierr; 1643 DMGlobalToLocalHookLink link,*p; 1644 1645 PetscFunctionBegin; 1646 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1647 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1648 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1649 link->beginhook = beginhook; 1650 link->endhook = endhook; 1651 link->ctx = ctx; 1652 link->next = NULL; 1653 *p = link; 1654 PetscFunctionReturn(0); 1655 } 1656 1657 #undef __FUNCT__ 1658 #define __FUNCT__ "DMGlobalToLocalBegin" 1659 /*@ 1660 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1661 1662 Neighbor-wise Collective on DM 1663 1664 Input Parameters: 1665 + dm - the DM object 1666 . g - the global vector 1667 . 
mode - INSERT_VALUES or ADD_VALUES 1668 - l - the local vector 1669 1670 1671 Level: beginner 1672 1673 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1674 1675 @*/ 1676 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1677 { 1678 PetscSF sf; 1679 PetscErrorCode ierr; 1680 DMGlobalToLocalHookLink link; 1681 1682 PetscFunctionBegin; 1683 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1684 for (link=dm->gtolhook; link; link=link->next) { 1685 if (link->beginhook) { 1686 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1687 } 1688 } 1689 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1690 if (sf) { 1691 PetscScalar *lArray, *gArray; 1692 1693 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1694 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1695 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1696 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1697 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1698 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1699 } else { 1700 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1701 } 1702 PetscFunctionReturn(0); 1703 } 1704 1705 #undef __FUNCT__ 1706 #define __FUNCT__ "DMGlobalToLocalEnd" 1707 /*@ 1708 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1709 1710 Neighbor-wise Collective on DM 1711 1712 Input Parameters: 1713 + dm - the DM object 1714 . g - the global vector 1715 . 
mode - INSERT_VALUES or ADD_VALUES 1716 - l - the local vector 1717 1718 1719 Level: beginner 1720 1721 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1722 1723 @*/ 1724 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1725 { 1726 PetscSF sf; 1727 PetscErrorCode ierr; 1728 PetscScalar *lArray, *gArray; 1729 DMGlobalToLocalHookLink link; 1730 1731 PetscFunctionBegin; 1732 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1733 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1734 if (sf) { 1735 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1736 1737 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1738 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1739 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1740 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1741 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1742 } else { 1743 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1744 } 1745 for (link=dm->gtolhook; link; link=link->next) { 1746 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1747 } 1748 PetscFunctionReturn(0); 1749 } 1750 1751 #undef __FUNCT__ 1752 #define __FUNCT__ "DMLocalToGlobalBegin" 1753 /*@ 1754 DMLocalToGlobalBegin - updates global vectors from local vectors 1755 1756 Neighbor-wise Collective on DM 1757 1758 Input Parameters: 1759 + dm - the DM object 1760 . l - the local vector 1761 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1762 base point. 1763 - - the global vector 1764 1765 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1766 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1767 global array to the final global array with VecAXPY(). 1768 1769 Level: beginner 1770 1771 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1772 1773 @*/ 1774 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1775 { 1776 PetscSF sf; 1777 PetscErrorCode ierr; 1778 1779 PetscFunctionBegin; 1780 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1781 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1782 if (sf) { 1783 MPI_Op op; 1784 PetscScalar *lArray, *gArray; 1785 1786 switch (mode) { 1787 case INSERT_VALUES: 1788 case INSERT_ALL_VALUES: 1789 op = MPIU_REPLACE; break; 1790 case ADD_VALUES: 1791 case ADD_ALL_VALUES: 1792 op = MPI_SUM; break; 1793 default: 1794 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1795 } 1796 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1797 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1798 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1799 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1800 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1801 } else { 1802 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1803 } 1804 PetscFunctionReturn(0); 1805 } 1806 1807 #undef __FUNCT__ 1808 #define __FUNCT__ "DMLocalToGlobalEnd" 1809 /*@ 1810 DMLocalToGlobalEnd - updates global vectors from local vectors 1811 1812 Neighbor-wise Collective on DM 1813 1814 Input Parameters: 1815 + dm - the DM object 1816 . l - the local vector 1817 . 
mode - INSERT_VALUES or ADD_VALUES 1818 - g - the global vector 1819 1820 1821 Level: beginner 1822 1823 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1824 1825 @*/ 1826 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1827 { 1828 PetscSF sf; 1829 PetscErrorCode ierr; 1830 1831 PetscFunctionBegin; 1832 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1833 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1834 if (sf) { 1835 MPI_Op op; 1836 PetscScalar *lArray, *gArray; 1837 1838 switch (mode) { 1839 case INSERT_VALUES: 1840 case INSERT_ALL_VALUES: 1841 op = MPIU_REPLACE; break; 1842 case ADD_VALUES: 1843 case ADD_ALL_VALUES: 1844 op = MPI_SUM; break; 1845 default: 1846 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1847 } 1848 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1849 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1850 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1851 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1852 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1853 } else { 1854 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1855 } 1856 PetscFunctionReturn(0); 1857 } 1858 1859 #undef __FUNCT__ 1860 #define __FUNCT__ "DMLocalToLocalBegin" 1861 /*@ 1862 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1863 that contain irrelevant values) to another local vector where the ghost 1864 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1865 1866 Neighbor-wise Collective on DM and Vec 1867 1868 Input Parameters: 1869 + dm - the DM object 1870 . g - the original local vector 1871 - mode - one of INSERT_VALUES or ADD_VALUES 1872 1873 Output Parameter: 1874 . 
l - the local vector with correct ghost values 1875 1876 Level: intermediate 1877 1878 Notes: 1879 The local vectors used here need not be the same as those 1880 obtained from DMCreateLocalVector(), BUT they 1881 must have the same parallel data layout; they could, for example, be 1882 obtained with VecDuplicate() from the DM originating vectors. 1883 1884 .keywords: DM, local-to-local, begin 1885 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1886 1887 @*/ 1888 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1889 { 1890 PetscErrorCode ierr; 1891 1892 PetscFunctionBegin; 1893 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1894 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1895 PetscFunctionReturn(0); 1896 } 1897 1898 #undef __FUNCT__ 1899 #define __FUNCT__ "DMLocalToLocalEnd" 1900 /*@ 1901 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1902 that contain irrelevant values) to another local vector where the ghost 1903 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1904 1905 Neighbor-wise Collective on DM and Vec 1906 1907 Input Parameters: 1908 + da - the DM object 1909 . g - the original local vector 1910 - mode - one of INSERT_VALUES or ADD_VALUES 1911 1912 Output Parameter: 1913 . l - the local vector with correct ghost values 1914 1915 Level: intermediate 1916 1917 Notes: 1918 The local vectors used here need not be the same as those 1919 obtained from DMCreateLocalVector(), BUT they 1920 must have the same parallel data layout; they could, for example, be 1921 obtained with VecDuplicate() from the DM originating vectors. 
1922 1923 .keywords: DM, local-to-local, end 1924 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1925 1926 @*/ 1927 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1928 { 1929 PetscErrorCode ierr; 1930 1931 PetscFunctionBegin; 1932 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1933 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1934 PetscFunctionReturn(0); 1935 } 1936 1937 1938 #undef __FUNCT__ 1939 #define __FUNCT__ "DMCoarsen" 1940 /*@ 1941 DMCoarsen - Coarsens a DM object 1942 1943 Collective on DM 1944 1945 Input Parameter: 1946 + dm - the DM object 1947 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1948 1949 Output Parameter: 1950 . dmc - the coarsened DM 1951 1952 Level: developer 1953 1954 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1955 1956 @*/ 1957 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1958 { 1959 PetscErrorCode ierr; 1960 DMCoarsenHookLink link; 1961 1962 PetscFunctionBegin; 1963 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1964 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1965 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1966 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1967 (*dmc)->ctx = dm->ctx; 1968 (*dmc)->levelup = dm->levelup; 1969 (*dmc)->leveldown = dm->leveldown + 1; 1970 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1971 for (link=dm->coarsenhook; link; link=link->next) { 1972 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1973 } 1974 PetscFunctionReturn(0); 1975 } 1976 1977 #undef __FUNCT__ 1978 #define __FUNCT__ "DMCoarsenHookAdd" 1979 /*@C 1980 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid 1981 1982 Logically Collective 1983 1984 Input Arguments: 1985 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1986 . coarsenhook - function to run when setting up a coarser level 1987 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1988 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1989 1990 Calling sequence of coarsenhook: 1991 $ coarsenhook(DM fine,DM coarse,void *ctx); 1992 1993 + fine - fine level DM 1994 . coarse - coarse level DM to restrict problem to 1995 - ctx - optional user-defined function context 1996 1997 Calling sequence for restricthook: 1998 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1999 2000 + fine - fine level DM 2001 . mrestrict - matrix restricting a fine-level solution to the coarse grid 2002 . rscale - scaling vector for restriction 2003 . inject - matrix restricting by injection 2004 . coarse - coarse level DM to update 2005 - ctx - optional user-defined function context 2006 2007 Level: advanced 2008 2009 Notes: 2010 This function is only needed if auxiliary data needs to be set up on coarse grids. 2011 2012 If this function is called multiple times, the hooks will be run in the order they are added. 2013 2014 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2015 extract the finest level information from its context (instead of from the SNES). 2016 2017 This function is currently not available from Fortran. 
2018 2019 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2020 @*/ 2021 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 2022 { 2023 PetscErrorCode ierr; 2024 DMCoarsenHookLink link,*p; 2025 2026 PetscFunctionBegin; 2027 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 2028 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2029 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 2030 link->coarsenhook = coarsenhook; 2031 link->restricthook = restricthook; 2032 link->ctx = ctx; 2033 link->next = NULL; 2034 *p = link; 2035 PetscFunctionReturn(0); 2036 } 2037 2038 #undef __FUNCT__ 2039 #define __FUNCT__ "DMRestrict" 2040 /*@ 2041 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 2042 2043 Collective if any hooks are 2044 2045 Input Arguments: 2046 + fine - finer DM to use as a base 2047 . restrct - restriction matrix, apply using MatRestrict() 2048 . 
inject - injection matrix, also use MatRestrict() 2049 - coarse - coarer DM to update 2050 2051 Level: developer 2052 2053 .seealso: DMCoarsenHookAdd(), MatRestrict() 2054 @*/ 2055 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2056 { 2057 PetscErrorCode ierr; 2058 DMCoarsenHookLink link; 2059 2060 PetscFunctionBegin; 2061 for (link=fine->coarsenhook; link; link=link->next) { 2062 if (link->restricthook) { 2063 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2064 } 2065 } 2066 PetscFunctionReturn(0); 2067 } 2068 2069 #undef __FUNCT__ 2070 #define __FUNCT__ "DMSubDomainHookAdd" 2071 /*@C 2072 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2073 2074 Logically Collective 2075 2076 Input Arguments: 2077 + global - global DM 2078 . ddhook - function to run to pass data to the decomposition DM upon its creation 2079 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2080 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2081 2082 2083 Calling sequence for ddhook: 2084 $ ddhook(DM global,DM block,void *ctx) 2085 2086 + global - global DM 2087 . block - block DM 2088 - ctx - optional user-defined function context 2089 2090 Calling sequence for restricthook: 2091 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2092 2093 + global - global DM 2094 . out - scatter to the outer (with ghost and overlap points) block vector 2095 . in - scatter to block vector values only owned locally 2096 . block - block DM 2097 - ctx - optional user-defined function context 2098 2099 Level: advanced 2100 2101 Notes: 2102 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2103 2104 If this function is called multiple times, the hooks will be run in the order they are added. 
2105 2106 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2107 extract the global information from its context (instead of from the SNES). 2108 2109 This function is currently not available from Fortran. 2110 2111 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2112 @*/ 2113 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 2114 { 2115 PetscErrorCode ierr; 2116 DMSubDomainHookLink link,*p; 2117 2118 PetscFunctionBegin; 2119 PetscValidHeaderSpecific(global,DM_CLASSID,1); 2120 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2121 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 2122 link->restricthook = restricthook; 2123 link->ddhook = ddhook; 2124 link->ctx = ctx; 2125 link->next = NULL; 2126 *p = link; 2127 PetscFunctionReturn(0); 2128 } 2129 2130 #undef __FUNCT__ 2131 #define __FUNCT__ "DMSubDomainRestrict" 2132 /*@ 2133 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 2134 2135 Collective if any hooks are 2136 2137 Input Arguments: 2138 + fine - finer DM to use as a base 2139 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 2140 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2141 - coarse - coarer DM to update 2142 2143 Level: developer 2144 2145 .seealso: DMCoarsenHookAdd(), MatRestrict() 2146 @*/ 2147 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2148 { 2149 PetscErrorCode ierr; 2150 DMSubDomainHookLink link; 2151 2152 PetscFunctionBegin; 2153 for (link=global->subdomainhook; link; link=link->next) { 2154 if (link->restricthook) { 2155 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2156 } 2157 } 2158 PetscFunctionReturn(0); 2159 } 2160 2161 #undef __FUNCT__ 2162 #define __FUNCT__ "DMGetCoarsenLevel" 2163 /*@ 2164 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2165 2166 Not Collective 2167 2168 Input Parameter: 2169 . dm - the DM object 2170 2171 Output Parameter: 2172 . level - number of coarsenings 2173 2174 Level: developer 2175 2176 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2177 2178 @*/ 2179 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2180 { 2181 PetscFunctionBegin; 2182 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2183 *level = dm->leveldown; 2184 PetscFunctionReturn(0); 2185 } 2186 2187 2188 2189 #undef __FUNCT__ 2190 #define __FUNCT__ "DMRefineHierarchy" 2191 /*@C 2192 DMRefineHierarchy - Refines a DM object, all levels at once 2193 2194 Collective on DM 2195 2196 Input Parameter: 2197 + dm - the DM object 2198 - nlevels - the number of levels of refinement 2199 2200 Output Parameter: 2201 . 
dmf - the refined DM hierarchy 2202 2203 Level: developer 2204 2205 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2206 2207 @*/ 2208 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2209 { 2210 PetscErrorCode ierr; 2211 2212 PetscFunctionBegin; 2213 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2214 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2215 if (nlevels == 0) PetscFunctionReturn(0); 2216 if (dm->ops->refinehierarchy) { 2217 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2218 } else if (dm->ops->refine) { 2219 PetscInt i; 2220 2221 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2222 for (i=1; i<nlevels; i++) { 2223 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2224 } 2225 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2226 PetscFunctionReturn(0); 2227 } 2228 2229 #undef __FUNCT__ 2230 #define __FUNCT__ "DMCoarsenHierarchy" 2231 /*@C 2232 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2233 2234 Collective on DM 2235 2236 Input Parameter: 2237 + dm - the DM object 2238 - nlevels - the number of levels of coarsening 2239 2240 Output Parameter: 2241 . 
dmc - the coarsened DM hierarchy 2242 2243 Level: developer 2244 2245 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2246 2247 @*/ 2248 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2249 { 2250 PetscErrorCode ierr; 2251 2252 PetscFunctionBegin; 2253 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2254 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2255 if (nlevels == 0) PetscFunctionReturn(0); 2256 PetscValidPointer(dmc,3); 2257 if (dm->ops->coarsenhierarchy) { 2258 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2259 } else if (dm->ops->coarsen) { 2260 PetscInt i; 2261 2262 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2263 for (i=1; i<nlevels; i++) { 2264 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2265 } 2266 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2267 PetscFunctionReturn(0); 2268 } 2269 2270 #undef __FUNCT__ 2271 #define __FUNCT__ "DMCreateAggregates" 2272 /*@ 2273 DMCreateAggregates - Gets the aggregates that map between 2274 grids associated with two DMs. 2275 2276 Collective on DM 2277 2278 Input Parameters: 2279 + dmc - the coarse grid DM 2280 - dmf - the fine grid DM 2281 2282 Output Parameters: 2283 . 
rest - the restriction matrix (transpose of the projection matrix) 2284 2285 Level: intermediate 2286 2287 .keywords: interpolation, restriction, multigrid 2288 2289 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2290 @*/ 2291 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2292 { 2293 PetscErrorCode ierr; 2294 2295 PetscFunctionBegin; 2296 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2297 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2298 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2299 PetscFunctionReturn(0); 2300 } 2301 2302 #undef __FUNCT__ 2303 #define __FUNCT__ "DMSetApplicationContextDestroy" 2304 /*@C 2305 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2306 2307 Not Collective 2308 2309 Input Parameters: 2310 + dm - the DM object 2311 - destroy - the destroy function 2312 2313 Level: intermediate 2314 2315 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2316 2317 @*/ 2318 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2319 { 2320 PetscFunctionBegin; 2321 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2322 dm->ctxdestroy = destroy; 2323 PetscFunctionReturn(0); 2324 } 2325 2326 #undef __FUNCT__ 2327 #define __FUNCT__ "DMSetApplicationContext" 2328 /*@ 2329 DMSetApplicationContext - Set a user context into a DM object 2330 2331 Not Collective 2332 2333 Input Parameters: 2334 + dm - the DM object 2335 - ctx - the user context 2336 2337 Level: intermediate 2338 2339 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2340 2341 @*/ 2342 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2343 { 2344 PetscFunctionBegin; 2345 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2346 dm->ctx = ctx; 2347 PetscFunctionReturn(0); 2348 } 2349 
2350 #undef __FUNCT__ 2351 #define __FUNCT__ "DMGetApplicationContext" 2352 /*@ 2353 DMGetApplicationContext - Gets a user context from a DM object 2354 2355 Not Collective 2356 2357 Input Parameter: 2358 . dm - the DM object 2359 2360 Output Parameter: 2361 . ctx - the user context 2362 2363 Level: intermediate 2364 2365 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2366 2367 @*/ 2368 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2369 { 2370 PetscFunctionBegin; 2371 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2372 *(void**)ctx = dm->ctx; 2373 PetscFunctionReturn(0); 2374 } 2375 2376 #undef __FUNCT__ 2377 #define __FUNCT__ "DMSetVariableBounds" 2378 /*@C 2379 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2380 2381 Logically Collective on DM 2382 2383 Input Parameter: 2384 + dm - the DM object 2385 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2386 2387 Level: intermediate 2388 2389 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2390 DMSetJacobian() 2391 2392 @*/ 2393 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2394 { 2395 PetscFunctionBegin; 2396 dm->ops->computevariablebounds = f; 2397 PetscFunctionReturn(0); 2398 } 2399 2400 #undef __FUNCT__ 2401 #define __FUNCT__ "DMHasVariableBounds" 2402 /*@ 2403 DMHasVariableBounds - does the DM object have a variable bounds function? 2404 2405 Not Collective 2406 2407 Input Parameter: 2408 . dm - the DM object to destroy 2409 2410 Output Parameter: 2411 . 
flg - PETSC_TRUE if the variable bounds function exists 2412 2413 Level: developer 2414 2415 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2416 2417 @*/ 2418 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2419 { 2420 PetscFunctionBegin; 2421 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2422 PetscFunctionReturn(0); 2423 } 2424 2425 #undef __FUNCT__ 2426 #define __FUNCT__ "DMComputeVariableBounds" 2427 /*@C 2428 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2429 2430 Logically Collective on DM 2431 2432 Input Parameters: 2433 + dm - the DM object to destroy 2434 - x - current solution at which the bounds are computed 2435 2436 Output parameters: 2437 + xl - lower bound 2438 - xu - upper bound 2439 2440 Level: intermediate 2441 2442 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2443 2444 @*/ 2445 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2446 { 2447 PetscErrorCode ierr; 2448 2449 PetscFunctionBegin; 2450 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2451 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2452 if (dm->ops->computevariablebounds) { 2453 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2454 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2455 PetscFunctionReturn(0); 2456 } 2457 2458 #undef __FUNCT__ 2459 #define __FUNCT__ "DMHasColoring" 2460 /*@ 2461 DMHasColoring - does the DM object have a method of providing a coloring? 2462 2463 Not Collective 2464 2465 Input Parameter: 2466 . dm - the DM object 2467 2468 Output Parameter: 2469 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2470 2471 Level: developer 2472 2473 .seealso DMHasFunction(), DMCreateColoring() 2474 2475 @*/ 2476 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2477 { 2478 PetscFunctionBegin; 2479 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2480 PetscFunctionReturn(0); 2481 } 2482 2483 #undef __FUNCT__ 2484 #define __FUNCT__ "DMSetVec" 2485 /*@C 2486 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2487 2488 Collective on DM 2489 2490 Input Parameter: 2491 + dm - the DM object 2492 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2493 2494 Level: developer 2495 2496 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2497 2498 @*/ 2499 PetscErrorCode DMSetVec(DM dm,Vec x) 2500 { 2501 PetscErrorCode ierr; 2502 2503 PetscFunctionBegin; 2504 if (x) { 2505 if (!dm->x) { 2506 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2507 } 2508 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2509 } else if (dm->x) { 2510 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2511 } 2512 PetscFunctionReturn(0); 2513 } 2514 2515 PetscFunctionList DMList = NULL; 2516 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2517 2518 #undef __FUNCT__ 2519 #define __FUNCT__ "DMSetType" 2520 /*@C 2521 DMSetType - Builds a DM, for a particular DM implementation. 2522 2523 Collective on DM 2524 2525 Input Parameters: 2526 + dm - The DM object 2527 - method - The name of the DM type 2528 2529 Options Database Key: 2530 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2531 2532 Notes: 2533 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2534 2535 Level: intermediate 2536 2537 .keywords: DM, set, type 2538 .seealso: DMGetType(), DMCreate() 2539 @*/ 2540 PetscErrorCode DMSetType(DM dm, DMType method) 2541 { 2542 PetscErrorCode (*r)(DM); 2543 PetscBool match; 2544 PetscErrorCode ierr; 2545 2546 PetscFunctionBegin; 2547 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2548 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2549 if (match) PetscFunctionReturn(0); 2550 2551 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2552 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2553 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2554 2555 if (dm->ops->destroy) { 2556 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2557 dm->ops->destroy = NULL; 2558 } 2559 ierr = (*r)(dm);CHKERRQ(ierr); 2560 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2561 PetscFunctionReturn(0); 2562 } 2563 2564 #undef __FUNCT__ 2565 #define __FUNCT__ "DMGetType" 2566 /*@C 2567 DMGetType - Gets the DM type name (as a string) from the DM. 2568 2569 Not Collective 2570 2571 Input Parameter: 2572 . dm - The DM 2573 2574 Output Parameter: 2575 . type - The DM type name 2576 2577 Level: intermediate 2578 2579 .keywords: DM, get, type, name 2580 .seealso: DMSetType(), DMCreate() 2581 @*/ 2582 PetscErrorCode DMGetType(DM dm, DMType *type) 2583 { 2584 PetscErrorCode ierr; 2585 2586 PetscFunctionBegin; 2587 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2588 PetscValidCharPointer(type,2); 2589 if (!DMRegisterAllCalled) { 2590 ierr = DMRegisterAll();CHKERRQ(ierr); 2591 } 2592 *type = ((PetscObject)dm)->type_name; 2593 PetscFunctionReturn(0); 2594 } 2595 2596 #undef __FUNCT__ 2597 #define __FUNCT__ "DMConvert" 2598 /*@C 2599 DMConvert - Converts a DM to another DM, either of the same or different type. 
2600 2601 Collective on DM 2602 2603 Input Parameters: 2604 + dm - the DM 2605 - newtype - new DM type (use "same" for the same type) 2606 2607 Output Parameter: 2608 . M - pointer to new DM 2609 2610 Notes: 2611 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2612 the MPI communicator of the generated DM is always the same as the communicator 2613 of the input DM. 2614 2615 Level: intermediate 2616 2617 .seealso: DMCreate() 2618 @*/ 2619 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2620 { 2621 DM B; 2622 char convname[256]; 2623 PetscBool sametype, issame; 2624 PetscErrorCode ierr; 2625 2626 PetscFunctionBegin; 2627 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2628 PetscValidType(dm,1); 2629 PetscValidPointer(M,3); 2630 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2631 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2632 { 2633 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2634 2635 /* 2636 Order of precedence: 2637 1) See if a specialized converter is known to the current DM. 2638 2) See if a specialized converter is known to the desired DM class. 2639 3) See if a good general converter is registered for the desired class 2640 4) See if a good general converter is known for the current matrix. 2641 5) Use a really basic converter. 2642 */ 2643 2644 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2645 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2646 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2647 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2648 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2649 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2650 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2651 if (conv) goto foundconv; 2652 2653 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2654 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2655 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2656 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2657 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2658 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2659 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2660 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2661 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2662 if (conv) { 2663 ierr = DMDestroy(&B);CHKERRQ(ierr); 2664 goto foundconv; 2665 } 2666 2667 #if 0 2668 /* 3) See if a good general converter is registered for the desired class */ 2669 conv = B->ops->convertfrom; 2670 ierr = DMDestroy(&B);CHKERRQ(ierr); 2671 if (conv) goto foundconv; 2672 2673 /* 4) See if a good general converter is known for the current matrix */ 2674 if (dm->ops->convert) { 2675 conv = dm->ops->convert; 2676 } 2677 if (conv) goto foundconv; 2678 #endif 2679 2680 /* 5) Use a really basic converter. 
*/ 2681 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2682 2683 foundconv: 2684 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2685 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2686 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2687 } 2688 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2689 PetscFunctionReturn(0); 2690 } 2691 2692 /*--------------------------------------------------------------------------------------------------------------------*/ 2693 2694 #undef __FUNCT__ 2695 #define __FUNCT__ "DMRegister" 2696 /*@C 2697 DMRegister - Adds a new DM component implementation 2698 2699 Not Collective 2700 2701 Input Parameters: 2702 + name - The name of a new user-defined creation routine 2703 - create_func - The creation routine itself 2704 2705 Notes: 2706 DMRegister() may be called multiple times to add several user-defined DMs 2707 2708 2709 Sample usage: 2710 .vb 2711 DMRegister("my_da", MyDMCreate); 2712 .ve 2713 2714 Then, your DM type can be chosen with the procedural interface via 2715 .vb 2716 DMCreate(MPI_Comm, DM *); 2717 DMSetType(DM,"my_da"); 2718 .ve 2719 or at runtime via the option 2720 .vb 2721 -da_type my_da 2722 .ve 2723 2724 Level: advanced 2725 2726 .keywords: DM, register 2727 .seealso: DMRegisterAll(), DMRegisterDestroy() 2728 2729 @*/ 2730 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2731 { 2732 PetscErrorCode ierr; 2733 2734 PetscFunctionBegin; 2735 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2736 PetscFunctionReturn(0); 2737 } 2738 2739 #undef __FUNCT__ 2740 #define __FUNCT__ "DMLoad" 2741 /*@C 2742 DMLoad - Loads a DM that has been stored in binary with DMView(). 
2743 2744 Collective on PetscViewer 2745 2746 Input Parameters: 2747 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2748 some related function before a call to DMLoad(). 2749 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2750 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2751 2752 Level: intermediate 2753 2754 Notes: 2755 The type is determined by the data in the file, any type set into the DM before this call is ignored. 2756 2757 Notes for advanced users: 2758 Most users should not need to know the details of the binary storage 2759 format, since DMLoad() and DMView() completely hide these details. 2760 But for anyone who's interested, the standard binary matrix storage 2761 format is 2762 .vb 2763 has not yet been determined 2764 .ve 2765 2766 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2767 @*/ 2768 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2769 { 2770 PetscBool isbinary, ishdf5; 2771 PetscErrorCode ierr; 2772 2773 PetscFunctionBegin; 2774 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2775 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2776 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 2777 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);CHKERRQ(ierr); 2778 if (isbinary) { 2779 PetscInt classid; 2780 char type[256]; 2781 2782 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 2783 if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid); 2784 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 2785 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 2786 if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);} 2787 } else if (ishdf5) { 2788 if (newdm->ops->load) {ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);} 2789 } 
else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()"); 2790 PetscFunctionReturn(0); 2791 } 2792 2793 /******************************** FEM Support **********************************/ 2794 2795 #undef __FUNCT__ 2796 #define __FUNCT__ "DMPrintCellVector" 2797 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2798 { 2799 PetscInt f; 2800 PetscErrorCode ierr; 2801 2802 PetscFunctionBegin; 2803 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2804 for (f = 0; f < len; ++f) { 2805 ierr = PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr); 2806 } 2807 PetscFunctionReturn(0); 2808 } 2809 2810 #undef __FUNCT__ 2811 #define __FUNCT__ "DMPrintCellMatrix" 2812 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2813 { 2814 PetscInt f, g; 2815 PetscErrorCode ierr; 2816 2817 PetscFunctionBegin; 2818 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2819 for (f = 0; f < rows; ++f) { 2820 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2821 for (g = 0; g < cols; ++g) { 2822 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2823 } 2824 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2825 } 2826 PetscFunctionReturn(0); 2827 } 2828 2829 #undef __FUNCT__ 2830 #define __FUNCT__ "DMPrintLocalVec" 2831 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X) 2832 { 2833 PetscMPIInt rank, numProcs; 2834 PetscInt p; 2835 PetscErrorCode ierr; 2836 2837 PetscFunctionBegin; 2838 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2839 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 2840 ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", 
name);CHKERRQ(ierr);
  /* Print each rank's portion in order; the barrier serializes the output */
  for (p = 0; p < numProcs; ++p) {
    if (p == rank) {
      Vec x;

      /* Work on a copy so chopping small entries does not modify the caller's vector */
      ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
      ierr = VecCopy(X, x);CHKERRQ(ierr);
      ierr = VecChop(x, tol);CHKERRQ(ierr);
      ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
    ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the section on first request, if the implementation knows how */
  if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);}
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-held section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    /* Mirror the section's field count and field names onto the DM's field objects */
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2944 2945 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2946 @*/ 2947 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2948 { 2949 PetscErrorCode ierr; 2950 2951 PetscFunctionBegin; 2952 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2953 PetscValidPointer(section, 2); 2954 if (!dm->defaultGlobalSection) { 2955 PetscSection s; 2956 2957 ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr); 2958 if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection"); 2959 if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection"); 2960 ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2961 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2962 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr); 2963 } 2964 *section = dm->defaultGlobalSection; 2965 PetscFunctionReturn(0); 2966 } 2967 2968 #undef __FUNCT__ 2969 #define __FUNCT__ "DMSetDefaultGlobalSection" 2970 /*@ 2971 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2972 2973 Input Parameters: 2974 + dm - The DM 2975 - section - The PetscSection, or NULL 2976 2977 Level: intermediate 2978 2979 Note: Any existing Section will be destroyed 2980 2981 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2982 @*/ 2983 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2984 { 2985 PetscErrorCode ierr; 2986 2987 PetscFunctionBegin; 2988 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2989 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2990 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2991 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2992 dm->defaultGlobalSection = section; 2993 PetscFunctionReturn(0); 2994 } 2995 2996 #undef __FUNCT__ 2997 #define __FUNCT__ "DMGetDefaultSF" 2998 /*@ 2999 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3000 it is created from the default PetscSection layouts in the DM. 3001 3002 Input Parameter: 3003 . dm - The DM 3004 3005 Output Parameter: 3006 . sf - The PetscSF 3007 3008 Level: intermediate 3009 3010 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
3011 3012 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3013 @*/ 3014 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 3015 { 3016 PetscInt nroots; 3017 PetscErrorCode ierr; 3018 3019 PetscFunctionBegin; 3020 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3021 PetscValidPointer(sf, 2); 3022 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 3023 if (nroots < 0) { 3024 PetscSection section, gSection; 3025 3026 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3027 if (section) { 3028 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3029 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3030 } else { 3031 *sf = NULL; 3032 PetscFunctionReturn(0); 3033 } 3034 } 3035 *sf = dm->defaultSF; 3036 PetscFunctionReturn(0); 3037 } 3038 3039 #undef __FUNCT__ 3040 #define __FUNCT__ "DMSetDefaultSF" 3041 /*@ 3042 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3043 3044 Input Parameters: 3045 + dm - The DM 3046 - sf - The PetscSF 3047 3048 Level: intermediate 3049 3050 Note: Any previous SF is destroyed 3051 3052 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3053 @*/ 3054 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3055 { 3056 PetscErrorCode ierr; 3057 3058 PetscFunctionBegin; 3059 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3060 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3061 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3062 dm->defaultSF = sf; 3063 PetscFunctionReturn(0); 3064 } 3065 3066 #undef __FUNCT__ 3067 #define __FUNCT__ "DMCreateDefaultSF" 3068 /*@C 3069 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3070 describing the data layout. 3071 3072 Input Parameters: 3073 + dm - The DM 3074 . 
localSection - PetscSection describing the local data layout 3075 - globalSection - PetscSection describing the global data layout 3076 3077 Level: intermediate 3078 3079 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3080 @*/ 3081 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3082 { 3083 MPI_Comm comm; 3084 PetscLayout layout; 3085 const PetscInt *ranges; 3086 PetscInt *local; 3087 PetscSFNode *remote; 3088 PetscInt pStart, pEnd, p, nroots, nleaves = 0, l; 3089 PetscMPIInt size, rank; 3090 PetscErrorCode ierr; 3091 3092 PetscFunctionBegin; 3093 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3094 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3095 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3096 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3097 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3098 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3099 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3100 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3101 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3102 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3103 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3104 for (p = pStart; p < pEnd; ++p) { 3105 PetscInt gdof, gcdof; 3106 3107 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3108 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3109 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3110 } 3111 ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr); 3112 ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr); 3113 for (p = pStart, l = 0; p < pEnd; ++p) { 3114 const PetscInt *cind; 3115 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3116 3117 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3118 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3119 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3120 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3121 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3122 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3123 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3124 if (!gdof) continue; /* Censored point */ 3125 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3126 if (gsize != dof-cdof) { 3127 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3128 cdof = 0; /* Ignore constraints */ 3129 } 3130 for (d = 0, c = 0; d < dof; ++d) { 3131 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3132 local[l+d-c] = off+d; 3133 } 3134 if (gdof < 0) { 3135 for (d = 0; d < gsize; ++d, ++l) { 3136 PetscInt offset = -(goff+1) + d, r; 3137 3138 ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr); 3139 if (r < 0) r = -(r+2); 3140 if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff); 3141 remote[l].rank = r; 3142 remote[l].index = offset - ranges[r]; 3143 } 3144 } else { 3145 for (d = 0; d < gsize; ++d, ++l) { 3146 remote[l].rank = rank; 3147 remote[l].index = goff+d - ranges[rank]; 3148 } 3149 } 3150 } 3151 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3152 
ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3153 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3154 PetscFunctionReturn(0); 3155 } 3156 3157 #undef __FUNCT__ 3158 #define __FUNCT__ "DMGetPointSF" 3159 /*@ 3160 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3161 3162 Input Parameter: 3163 . dm - The DM 3164 3165 Output Parameter: 3166 . sf - The PetscSF 3167 3168 Level: intermediate 3169 3170 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3171 3172 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3173 @*/ 3174 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3175 { 3176 PetscFunctionBegin; 3177 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3178 PetscValidPointer(sf, 2); 3179 *sf = dm->sf; 3180 PetscFunctionReturn(0); 3181 } 3182 3183 #undef __FUNCT__ 3184 #define __FUNCT__ "DMSetPointSF" 3185 /*@ 3186 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 
3187 3188 Input Parameters: 3189 + dm - The DM 3190 - sf - The PetscSF 3191 3192 Level: intermediate 3193 3194 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3195 @*/ 3196 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3197 { 3198 PetscErrorCode ierr; 3199 3200 PetscFunctionBegin; 3201 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3202 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3203 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3204 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3205 dm->sf = sf; 3206 PetscFunctionReturn(0); 3207 } 3208 3209 #undef __FUNCT__ 3210 #define __FUNCT__ "DMGetNumFields" 3211 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3212 { 3213 PetscFunctionBegin; 3214 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3215 PetscValidPointer(numFields, 2); 3216 *numFields = dm->numFields; 3217 PetscFunctionReturn(0); 3218 } 3219 3220 #undef __FUNCT__ 3221 #define __FUNCT__ "DMSetNumFields" 3222 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3223 { 3224 PetscInt f; 3225 PetscErrorCode ierr; 3226 3227 PetscFunctionBegin; 3228 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3229 if (dm->numFields == numFields) PetscFunctionReturn(0); 3230 for (f = 0; f < dm->numFields; ++f) { 3231 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3232 } 3233 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3234 dm->numFields = numFields; 3235 ierr = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr); 3236 for (f = 0; f < dm->numFields; ++f) { 3237 ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3238 } 3239 PetscFunctionReturn(0); 3240 } 3241 3242 #undef __FUNCT__ 3243 #define __FUNCT__ "DMGetField" 3244 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3245 { 3246 PetscFunctionBegin; 3247 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3248 PetscValidPointer(field, 3); 3249 if (!dm->fields) 
SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3250 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3251 *field = (PetscObject) dm->fields[f]; 3252 PetscFunctionReturn(0); 3253 } 3254 3255 #undef __FUNCT__ 3256 #define __FUNCT__ "DMSetField" 3257 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field) 3258 { 3259 PetscErrorCode ierr; 3260 3261 PetscFunctionBegin; 3262 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3263 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3264 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3265 if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0); 3266 ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr); 3267 dm->fields[f] = (PetscFE) field; 3268 ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr); 3269 PetscFunctionReturn(0); 3270 } 3271 3272 #undef __FUNCT__ 3273 #define __FUNCT__ "DMRestrictHook_Coordinates" 3274 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3275 { 3276 DM dm_coord,dmc_coord; 3277 PetscErrorCode ierr; 3278 Vec coords,ccoords; 3279 VecScatter scat; 3280 PetscFunctionBegin; 3281 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3282 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3283 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3284 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3285 if (coords && !ccoords) { 3286 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3287 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3288 ierr = 
VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3289 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3290 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3291 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3292 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3293 } 3294 PetscFunctionReturn(0); 3295 } 3296 3297 #undef __FUNCT__ 3298 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3299 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3300 { 3301 DM dm_coord,subdm_coord; 3302 PetscErrorCode ierr; 3303 Vec coords,ccoords,clcoords; 3304 VecScatter *scat_i,*scat_g; 3305 PetscFunctionBegin; 3306 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3307 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3308 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3309 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3310 if (coords && !ccoords) { 3311 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3312 ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3313 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3314 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3315 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3316 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3317 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3318 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3319 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3320 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3321 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3322 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3323 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3324 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3325 ierr = 
PetscFree(scat_g);CHKERRQ(ierr); 3326 } 3327 PetscFunctionReturn(0); 3328 } 3329 3330 #undef __FUNCT__ 3331 #define __FUNCT__ "DMSetCoordinates" 3332 /*@ 3333 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3334 3335 Collective on DM 3336 3337 Input Parameters: 3338 + dm - the DM 3339 - c - coordinate vector 3340 3341 Note: 3342 The coordinates do include those for ghost points, which are in the local vector 3343 3344 Level: intermediate 3345 3346 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3347 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3348 @*/ 3349 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3350 { 3351 PetscErrorCode ierr; 3352 3353 PetscFunctionBegin; 3354 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3355 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3356 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3357 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3358 dm->coordinates = c; 3359 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3360 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3361 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3362 PetscFunctionReturn(0); 3363 } 3364 3365 #undef __FUNCT__ 3366 #define __FUNCT__ "DMSetCoordinatesLocal" 3367 /*@ 3368 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3369 3370 Collective on DM 3371 3372 Input Parameters: 3373 + dm - the DM 3374 - c - coordinate vector 3375 3376 Note: 3377 The coordinates of ghost points can be set using DMSetCoordinates() 3378 followed by DMGetCoordinatesLocal(). This is intended to enable the 3379 setting of ghost coordinates outside of the domain. 
3380 3381 Level: intermediate 3382 3383 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3384 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3385 @*/ 3386 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3387 { 3388 PetscErrorCode ierr; 3389 3390 PetscFunctionBegin; 3391 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3392 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3393 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3394 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3395 3396 dm->coordinatesLocal = c; 3397 3398 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3399 PetscFunctionReturn(0); 3400 } 3401 3402 #undef __FUNCT__ 3403 #define __FUNCT__ "DMGetCoordinates" 3404 /*@ 3405 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3406 3407 Not Collective 3408 3409 Input Parameter: 3410 . dm - the DM 3411 3412 Output Parameter: 3413 . c - global coordinate vector 3414 3415 Note: 3416 This is a borrowed reference, so the user should NOT destroy this vector 3417 3418 Each process has only the local coordinates (does NOT have the ghost coordinates). 3419 3420 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3421 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3422 3423 Level: intermediate 3424 3425 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3426 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3427 @*/ 3428 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3429 { 3430 PetscErrorCode ierr; 3431 3432 PetscFunctionBegin; 3433 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3434 PetscValidPointer(c,2); 3435 if (!dm->coordinates && dm->coordinatesLocal) { 3436 DM cdm = NULL; 3437 3438 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3439 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3440 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3441 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3442 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3443 } 3444 *c = dm->coordinates; 3445 PetscFunctionReturn(0); 3446 } 3447 3448 #undef __FUNCT__ 3449 #define __FUNCT__ "DMGetCoordinatesLocal" 3450 /*@ 3451 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3452 3453 Collective on DM 3454 3455 Input Parameter: 3456 . dm - the DM 3457 3458 Output Parameter: 3459 . c - coordinate vector 3460 3461 Note: 3462 This is a borrowed reference, so the user should NOT destroy this vector 3463 3464 Each process has the local and ghost coordinates 3465 3466 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3467 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3468 3469 Level: intermediate 3470 3471 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3472 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3473 @*/ 3474 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3475 { 3476 PetscErrorCode ierr; 3477 3478 PetscFunctionBegin; 3479 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3480 PetscValidPointer(c,2); 3481 if (!dm->coordinatesLocal && dm->coordinates) { 3482 DM cdm = NULL; 3483 3484 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3485 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3486 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3487 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3488 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3489 } 3490 *c = dm->coordinatesLocal; 3491 PetscFunctionReturn(0); 3492 } 3493 3494 #undef __FUNCT__ 3495 #define __FUNCT__ "DMGetCoordinateDM" 3496 /*@ 3497 DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates 3498 3499 Collective on DM 3500 3501 Input Parameter: 3502 . dm - the DM 3503 3504 Output Parameter: 3505 . 
cdm - coordinate DM 3506 3507 Level: intermediate 3508 3509 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3510 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3511 @*/ 3512 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3513 { 3514 PetscErrorCode ierr; 3515 3516 PetscFunctionBegin; 3517 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3518 PetscValidPointer(cdm,2); 3519 if (!dm->coordinateDM) { 3520 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3521 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3522 } 3523 *cdm = dm->coordinateDM; 3524 PetscFunctionReturn(0); 3525 } 3526 3527 #undef __FUNCT__ 3528 #define __FUNCT__ "DMSetCoordinateDM" 3529 /*@ 3530 DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates 3531 3532 Logically Collective on DM 3533 3534 Input Parameters: 3535 + dm - the DM 3536 - cdm - coordinate DM 3537 3538 Level: intermediate 3539 3540 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3541 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3542 @*/ 3543 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm) 3544 { 3545 PetscErrorCode ierr; 3546 3547 PetscFunctionBegin; 3548 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3549 PetscValidHeaderSpecific(cdm,DM_CLASSID,2); 3550 ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr); 3551 dm->coordinateDM = cdm; 3552 ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr); 3553 PetscFunctionReturn(0); 3554 } 3555 3556 #undef __FUNCT__ 3557 #define __FUNCT__ "DMGetCoordinateSection" 3558 /*@ 3559 DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh. 3560 3561 Not Collective 3562 3563 Input Parameter: 3564 . 
dm - The DM object 3565 3566 Output Parameter: 3567 . section - The PetscSection object 3568 3569 Level: intermediate 3570 3571 .keywords: mesh, coordinates 3572 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection() 3573 @*/ 3574 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section) 3575 { 3576 DM cdm; 3577 PetscErrorCode ierr; 3578 3579 PetscFunctionBegin; 3580 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3581 PetscValidPointer(section, 2); 3582 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3583 ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr); 3584 PetscFunctionReturn(0); 3585 } 3586 3587 #undef __FUNCT__ 3588 #define __FUNCT__ "DMSetCoordinateSection" 3589 /*@ 3590 DMSetCoordinateSection - Set the layout of coordinate values over the mesh. 3591 3592 Not Collective 3593 3594 Input Parameters: 3595 + dm - The DM object 3596 - section - The PetscSection object 3597 3598 Level: intermediate 3599 3600 .keywords: mesh, coordinates 3601 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection() 3602 @*/ 3603 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section) 3604 { 3605 DM cdm; 3606 PetscErrorCode ierr; 3607 3608 PetscFunctionBegin; 3609 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3610 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 3611 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3612 ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr); 3613 PetscFunctionReturn(0); 3614 } 3615 3616 #undef __FUNCT__ 3617 #define __FUNCT__ "DMGetPeriodicity" 3618 PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal **maxCell, const PetscReal **L) 3619 { 3620 PetscFunctionBegin; 3621 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3622 if (L) *L = dm->L; 3623 if (maxCell) *maxCell = dm->maxCell; 3624 PetscFunctionReturn(0); 3625 } 3626 3627 #undef __FUNCT__ 3628 #define __FUNCT__ "DMSetPeriodicity" 3629 PetscErrorCode DMSetPeriodicity(DM dm, const PetscReal maxCell[], const PetscReal L[]) 
3630 { 3631 Vec coordinates; 3632 PetscInt dim, d; 3633 PetscErrorCode ierr; 3634 3635 PetscFunctionBegin; 3636 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3637 ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr); 3638 ierr = VecGetBlockSize(coordinates, &dim);CHKERRQ(ierr); 3639 ierr = PetscMalloc1(dim,&dm->L);CHKERRQ(ierr); 3640 ierr = PetscMalloc1(dim,&dm->maxCell);CHKERRQ(ierr); 3641 for (d = 0; d < dim; ++d) {dm->L[d] = L[d]; dm->maxCell[d] = maxCell[d];} 3642 PetscFunctionReturn(0); 3643 } 3644 3645 #undef __FUNCT__ 3646 #define __FUNCT__ "DMLocatePoints" 3647 /*@ 3648 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3649 3650 Not collective 3651 3652 Input Parameters: 3653 + dm - The DM 3654 - v - The Vec of points 3655 3656 Output Parameter: 3657 . cells - The local cell numbers for cells which contain the points 3658 3659 Level: developer 3660 3661 .keywords: point location, mesh 3662 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3663 @*/ 3664 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3665 { 3666 PetscErrorCode ierr; 3667 3668 PetscFunctionBegin; 3669 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3670 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3671 PetscValidPointer(cells,3); 3672 if (dm->ops->locatepoints) { 3673 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3674 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3675 PetscFunctionReturn(0); 3676 } 3677 3678 #undef __FUNCT__ 3679 #define __FUNCT__ "DMGetOutputDM" 3680 /*@ 3681 DMGetOutputDM - Retrieve the DM associated with the layout for output 3682 3683 Input Parameter: 3684 . dm - The original DM 3685 3686 Output Parameter: 3687 . 
odm - The DM which provides the layout for output 3688 3689 Level: intermediate 3690 3691 .seealso: VecView(), DMGetDefaultGlobalSection() 3692 @*/ 3693 PetscErrorCode DMGetOutputDM(DM dm, DM *odm) 3694 { 3695 PetscSection section; 3696 PetscBool hasConstraints; 3697 PetscErrorCode ierr; 3698 3699 PetscFunctionBegin; 3700 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3701 PetscValidPointer(odm,2); 3702 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3703 ierr = PetscSectionHasConstraints(section, &hasConstraints);CHKERRQ(ierr); 3704 if (!hasConstraints) { 3705 *odm = dm; 3706 PetscFunctionReturn(0); 3707 } 3708 if (!dm->dmBC) { 3709 PetscSection newSection, gsection; 3710 PetscSF sf; 3711 3712 ierr = DMClone(dm, &dm->dmBC);CHKERRQ(ierr); 3713 ierr = PetscSectionClone(section, &newSection);CHKERRQ(ierr); 3714 ierr = DMSetDefaultSection(dm->dmBC, newSection);CHKERRQ(ierr); 3715 ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr); 3716 ierr = DMGetPointSF(dm->dmBC, &sf);CHKERRQ(ierr); 3717 ierr = PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);CHKERRQ(ierr); 3718 ierr = DMSetDefaultGlobalSection(dm->dmBC, gsection);CHKERRQ(ierr); 3719 ierr = PetscSectionDestroy(&gsection);CHKERRQ(ierr); 3720 } 3721 *odm = dm->dmBC; 3722 PetscFunctionReturn(0); 3723 } 3724 3725 #undef __FUNCT__ 3726 #define __FUNCT__ "DMGetOutputSequenceNumber" 3727 /*@ 3728 DMGetOutputSequenceNumber - Retrieve the sequence number for output 3729 3730 Input Parameter: 3731 . dm - The original DM 3732 3733 Output Parameter: 3734 . num - The output sequence number 3735 3736 Level: intermediate 3737 3738 Note: This is intended for output that should appear in sequence, for instance 3739 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 
3740 3741 .seealso: VecView() 3742 @*/ 3743 PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num) 3744 { 3745 PetscFunctionBegin; 3746 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3747 PetscValidPointer(num,2); 3748 *num = dm->outputSequenceNum; 3749 PetscFunctionReturn(0); 3750 } 3751 3752 #undef __FUNCT__ 3753 #define __FUNCT__ "DMSetOutputSequenceNumber" 3754 /*@ 3755 DMSetOutputSequenceNumber - Set the sequence number for output 3756 3757 Input Parameters: 3758 + dm - The original DM 3759 - num - The output sequence number 3760 3761 Level: intermediate 3762 3763 Note: This is intended for output that should appear in sequence, for instance 3764 a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system. 3765 3766 .seealso: VecView() 3767 @*/ 3768 PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num) 3769 { 3770 PetscFunctionBegin; 3771 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3772 dm->outputSequenceNum = num; 3773 PetscFunctionReturn(0); 3774 } 3775