1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed. 11 12 Collective on Vec 13 14 Input Parameters: 15 + dm - the DM 16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm' 17 - optionname - option to activate viewing 18 19 Level: intermediate 20 21 .keywords: DM, view, options, database 22 .seealso: VecViewFromOptions(), MatViewFromOptions() 23 */ 24 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 25 { 26 PetscErrorCode ierr; 27 PetscBool flg; 28 PetscViewer viewer; 29 PetscViewerFormat format; 30 31 PetscFunctionBegin; 32 if (prefix) { 33 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 34 } else { 35 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 36 } 37 if (flg) { 38 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 39 ierr = DMView(dm,viewer);CHKERRQ(ierr); 40 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 41 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 42 } 43 PetscFunctionReturn(0); 44 } 45 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMCreate" 49 /*@ 50 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 51 52 If you never call DMSetType() it will generate an 53 error when you try to use the vector. 54 55 Collective on MPI_Comm 56 57 Input Parameter: 58 . comm - The communicator for the DM object 59 60 Output Parameter: 61 . 
dm - The DM object 62 63 Level: beginner 64 65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 66 @*/ 67 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 68 { 69 DM v; 70 PetscErrorCode ierr; 71 72 PetscFunctionBegin; 73 PetscValidPointer(dm,2); 74 *dm = NULL; 75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 76 ierr = VecInitializePackage();CHKERRQ(ierr); 77 ierr = MatInitializePackage();CHKERRQ(ierr); 78 ierr = DMInitializePackage();CHKERRQ(ierr); 79 #endif 80 81 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 82 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 83 84 85 v->ltogmap = NULL; 86 v->ltogmapb = NULL; 87 v->bs = 1; 88 v->coloringtype = IS_COLORING_GLOBAL; 89 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 90 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 91 v->defaultSection = NULL; 92 v->defaultGlobalSection = NULL; 93 { 94 PetscInt i; 95 for (i = 0; i < 10; ++i) { 96 v->nullspaceConstructors[i] = NULL; 97 } 98 } 99 v->numFields = 0; 100 v->fields = NULL; 101 102 *dm = v; 103 PetscFunctionReturn(0); 104 } 105 106 #undef __FUNCT__ 107 #define __FUNCT__ "DMClone" 108 /*@ 109 DMClone - Creates a DM object with the same topology as the original. 110 111 Collective on MPI_Comm 112 113 Input Parameter: 114 . dm - The original DM object 115 116 Output Parameter: 117 . 
newdm - The new DM object 118 119 Level: beginner 120 121 .keywords: DM, topology, create 122 @*/ 123 PetscErrorCode DMClone(DM dm, DM *newdm) 124 { 125 PetscSF sf; 126 Vec coords; 127 void *ctx; 128 PetscErrorCode ierr; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 132 PetscValidPointer(newdm,2); 133 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 134 if (dm->ops->clone) { 135 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 136 } 137 (*newdm)->setupcalled = PETSC_TRUE; 138 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 139 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 140 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 141 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 142 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 143 if (coords) { 144 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 145 } else { 146 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 147 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 148 } 149 PetscFunctionReturn(0); 150 } 151 152 #undef __FUNCT__ 153 #define __FUNCT__ "DMSetVecType" 154 /*@C 155 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 156 157 Logically Collective on DMDA 158 159 Input Parameter: 160 + da - initial distributed array 161 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 162 163 Options Database: 164 . 
-dm_vec_type ctype 165 166 Level: intermediate 167 168 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 169 @*/ 170 PetscErrorCode DMSetVecType(DM da,VecType ctype) 171 { 172 PetscErrorCode ierr; 173 174 PetscFunctionBegin; 175 PetscValidHeaderSpecific(da,DM_CLASSID,1); 176 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 177 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 178 PetscFunctionReturn(0); 179 } 180 181 #undef __FUNCT__ 182 #define __FUNCT__ "VecGetDM" 183 /*@ 184 VecGetDM - Gets the DM defining the data layout of the vector 185 186 Not collective 187 188 Input Parameter: 189 . v - The Vec 190 191 Output Parameter: 192 . dm - The DM 193 194 Level: intermediate 195 196 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 197 @*/ 198 PetscErrorCode VecGetDM(Vec v, DM *dm) 199 { 200 PetscErrorCode ierr; 201 202 PetscFunctionBegin; 203 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 204 PetscValidPointer(dm,2); 205 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 206 PetscFunctionReturn(0); 207 } 208 209 #undef __FUNCT__ 210 #define __FUNCT__ "VecSetDM" 211 /*@ 212 VecSetDM - Sets the DM defining the data layout of the vector 213 214 Not collective 215 216 Input Parameters: 217 + v - The Vec 218 - dm - The DM 219 220 Level: intermediate 221 222 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 223 @*/ 224 PetscErrorCode VecSetDM(Vec v, DM dm) 225 { 226 PetscErrorCode ierr; 227 228 PetscFunctionBegin; 229 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 230 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 231 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 232 PetscFunctionReturn(0); 233 } 234 235 #undef __FUNCT__ 236 #define __FUNCT__ "DMSetMatType" 237 /*@C 238 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 239 240 Logically Collective on 
DM 241 242 Input Parameter: 243 + dm - the DM context 244 . ctype - the matrix type 245 246 Options Database: 247 . -dm_mat_type ctype 248 249 Level: intermediate 250 251 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 252 @*/ 253 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 254 { 255 PetscErrorCode ierr; 256 257 PetscFunctionBegin; 258 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 259 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 260 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 261 PetscFunctionReturn(0); 262 } 263 264 #undef __FUNCT__ 265 #define __FUNCT__ "MatGetDM" 266 /*@ 267 MatGetDM - Gets the DM defining the data layout of the matrix 268 269 Not collective 270 271 Input Parameter: 272 . A - The Mat 273 274 Output Parameter: 275 . dm - The DM 276 277 Level: intermediate 278 279 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 280 @*/ 281 PetscErrorCode MatGetDM(Mat A, DM *dm) 282 { 283 PetscErrorCode ierr; 284 285 PetscFunctionBegin; 286 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 287 PetscValidPointer(dm,2); 288 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 289 PetscFunctionReturn(0); 290 } 291 292 #undef __FUNCT__ 293 #define __FUNCT__ "MatSetDM" 294 /*@ 295 MatSetDM - Sets the DM defining the data layout of the matrix 296 297 Not collective 298 299 Input Parameters: 300 + A - The Mat 301 - dm - The DM 302 303 Level: intermediate 304 305 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 306 @*/ 307 PetscErrorCode MatSetDM(Mat A, DM dm) 308 { 309 PetscErrorCode ierr; 310 311 PetscFunctionBegin; 312 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 313 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 314 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 315 PetscFunctionReturn(0); 316 } 317 318 #undef __FUNCT__ 319 #define __FUNCT__ "DMSetOptionsPrefix" 320 /*@C 321 DMSetOptionsPrefix - 
Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameter:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* only proceed with the teardown when the remaining references are exactly the circular ones counted above */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = NULL;

  for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedlocal = NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* type-specific destroy runs last, before the header itself is freed */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* idempotent: repeated calls are no-ops */
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type> type of matrix to create inside DM 550 - -dm_coloring_type <global or ghosted> 551 552 Level: developer 553 554 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 555 556 @*/ 557 PetscErrorCode DMSetFromOptions(DM dm) 558 { 559 char typeName[256] = MATAIJ; 560 PetscBool flg; 561 PetscErrorCode ierr; 562 563 PetscFunctionBegin; 564 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 565 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 566 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 567 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 568 if (flg) { 569 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 570 } 571 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 572 if (flg) { 573 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 574 } 575 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 576 if (dm->ops->setfromoptions) { 577 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 578 } 579 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 580 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 581 ierr = PetscOptionsEnd();CHKERRQ(ierr); 582 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 583 PetscFunctionReturn(0); 584 } 585 586 #undef __FUNCT__ 587 #define __FUNCT__ "DMView" 588 /*@C 589 DMView - Views a vector packer or DMDA. 
590 591 Collective on DM 592 593 Input Parameter: 594 + dm - the DM object to view 595 - v - the viewer 596 597 Level: developer 598 599 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 600 601 @*/ 602 PetscErrorCode DMView(DM dm,PetscViewer v) 603 { 604 PetscErrorCode ierr; 605 PetscBool isbinary; 606 607 PetscFunctionBegin; 608 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 609 if (!v) { 610 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 611 } 612 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 613 if (isbinary) { 614 PetscInt classid = DM_FILE_CLASSID; 615 char type[256]; 616 617 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 618 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 619 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 620 } 621 if (dm->ops->view) { 622 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 623 } 624 PetscFunctionReturn(0); 625 } 626 627 #undef __FUNCT__ 628 #define __FUNCT__ "DMCreateGlobalVector" 629 /*@ 630 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 631 632 Collective on DM 633 634 Input Parameter: 635 . dm - the DM object 636 637 Output Parameter: 638 . vec - the global vector 639 640 Level: beginner 641 642 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 643 644 @*/ 645 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 646 { 647 PetscErrorCode ierr; 648 649 PetscFunctionBegin; 650 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 651 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 652 PetscFunctionReturn(0); 653 } 654 655 #undef __FUNCT__ 656 #define __FUNCT__ "DMCreateLocalVector" 657 /*@ 658 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 659 660 Not Collective 661 662 Input Parameter: 663 . 
dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Lazily build and cache the mapping on first access */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE: this local array intentionally shadows the ltog output parameter */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().
762 763 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 764 @*/ 765 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 766 { 767 PetscErrorCode ierr; 768 769 PetscFunctionBegin; 770 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 771 PetscValidPointer(ltog,2); 772 if (!dm->ltogmapb) { 773 PetscInt bs; 774 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 775 if (bs > 1) { 776 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 777 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 778 } else { 779 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 780 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 781 } 782 } 783 *ltog = dm->ltogmapb; 784 PetscFunctionReturn(0); 785 } 786 787 #undef __FUNCT__ 788 #define __FUNCT__ "DMGetBlockSize" 789 /*@ 790 DMGetBlockSize - Gets the inherent block size associated with a DM 791 792 Not Collective 793 794 Input Parameter: 795 . dm - the DM with block structure 796 797 Output Parameter: 798 . 
bs - the block size, 1 implies no exploitable block structure 799 800 Level: intermediate 801 802 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 803 @*/ 804 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 805 { 806 PetscFunctionBegin; 807 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 808 PetscValidPointer(bs,2); 809 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 810 *bs = dm->bs; 811 PetscFunctionReturn(0); 812 } 813 814 #undef __FUNCT__ 815 #define __FUNCT__ "DMCreateInterpolation" 816 /*@ 817 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 818 819 Collective on DM 820 821 Input Parameter: 822 + dm1 - the DM object 823 - dm2 - the second, finer DM object 824 825 Output Parameter: 826 + mat - the interpolation 827 - vec - the scaling (optional) 828 829 Level: developer 830 831 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 832 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 833 834 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 835 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
836 837 838 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 839 840 @*/ 841 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 842 { 843 PetscErrorCode ierr; 844 845 PetscFunctionBegin; 846 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 847 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 848 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 849 PetscFunctionReturn(0); 850 } 851 852 #undef __FUNCT__ 853 #define __FUNCT__ "DMCreateInjection" 854 /*@ 855 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 856 857 Collective on DM 858 859 Input Parameter: 860 + dm1 - the DM object 861 - dm2 - the second, finer DM object 862 863 Output Parameter: 864 . ctx - the injection 865 866 Level: developer 867 868 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 869 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 870 871 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 872 873 @*/ 874 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 875 { 876 PetscErrorCode ierr; 877 878 PetscFunctionBegin; 879 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 880 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 881 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 882 PetscFunctionReturn(0); 883 } 884 885 #undef __FUNCT__ 886 #define __FUNCT__ "DMCreateColoring" 887 /*@C 888 DMCreateColoring - Gets coloring for a DMDA or DMComposite 889 890 Collective on DM 891 892 Input Parameter: 893 + dm - the DM object 894 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 895 - matype - either MATAIJ or MATBAIJ 896 897 Output Parameter: 898 . 
coloring - the coloring 899 900 Level: developer 901 902 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 903 904 @*/ 905 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 906 { 907 PetscErrorCode ierr; 908 909 PetscFunctionBegin; 910 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 911 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 912 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 913 PetscFunctionReturn(0); 914 } 915 916 #undef __FUNCT__ 917 #define __FUNCT__ "DMCreateMatrix" 918 /*@C 919 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 920 921 Collective on DM 922 923 Input Parameter: 924 + dm - the DM object 925 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 926 any type which inherits from one of these (such as MATAIJ) 927 928 Output Parameter: 929 . mat - the empty Jacobian 930 931 Level: beginner 932 933 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 934 do not need to do it yourself. 935 936 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 937 the nonzero pattern call DMDASetMatPreallocateOnly() 938 939 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 940 internally by PETSc. 941 942 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 943 the indices for the global numbering for DMDAs which is complicated. 
944 945 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 946 947 @*/ 948 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 949 { 950 PetscErrorCode ierr; 951 952 PetscFunctionBegin; 953 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 954 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 955 ierr = MatInitializePackage();CHKERRQ(ierr); 956 #endif 957 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 958 PetscValidPointer(mat,3); 959 if (dm->mattype) { 960 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 961 } else { 962 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 963 } 964 PetscFunctionReturn(0); 965 } 966 967 #undef __FUNCT__ 968 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 969 /*@ 970 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 971 preallocated but the nonzero structure and zero values will not be set. 972 973 Logically Collective on DMDA 974 975 Input Parameter: 976 + dm - the DM 977 - only - PETSC_TRUE if only want preallocation 978 979 Level: developer 980 .seealso DMCreateMatrix() 981 @*/ 982 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 983 { 984 PetscFunctionBegin; 985 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 986 dm->prealloc_only = only; 987 PetscFunctionReturn(0); 988 } 989 990 #undef __FUNCT__ 991 #define __FUNCT__ "DMGetWorkArray" 992 /*@C 993 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 994 995 Not Collective 996 997 Input Parameters: 998 + dm - the DM object 999 . count - The minium size 1000 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1001 1002 Output Parameter: 1003 . 
array - the work array 1004 1005 Level: developer 1006 1007 .seealso DMDestroy(), DMCreate() 1008 @*/ 1009 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1010 { 1011 PetscErrorCode ierr; 1012 DMWorkLink link; 1013 size_t size; 1014 1015 PetscFunctionBegin; 1016 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1017 PetscValidPointer(mem,4); 1018 if (dm->workin) { 1019 link = dm->workin; 1020 dm->workin = dm->workin->next; 1021 } else { 1022 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 1023 } 1024 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1025 if (size*count > link->bytes) { 1026 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1027 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1028 link->bytes = size*count; 1029 } 1030 link->next = dm->workout; 1031 dm->workout = link; 1032 *(void**)mem = link->mem; 1033 PetscFunctionReturn(0); 1034 } 1035 1036 #undef __FUNCT__ 1037 #define __FUNCT__ "DMRestoreWorkArray" 1038 /*@C 1039 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1040 1041 Not Collective 1042 1043 Input Parameters: 1044 + dm - the DM object 1045 . count - The minium size 1046 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1047 1048 Output Parameter: 1049 . 
array - the work array 1050 1051 Level: developer 1052 1053 .seealso DMDestroy(), DMCreate() 1054 @*/ 1055 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1056 { 1057 DMWorkLink *p,link; 1058 1059 PetscFunctionBegin; 1060 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1061 PetscValidPointer(mem,4); 1062 for (p=&dm->workout; (link=*p); p=&link->next) { 1063 if (link->mem == *(void**)mem) { 1064 *p = link->next; 1065 link->next = dm->workin; 1066 dm->workin = link; 1067 *(void**)mem = NULL; 1068 PetscFunctionReturn(0); 1069 } 1070 } 1071 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1072 PetscFunctionReturn(0); 1073 } 1074 1075 #undef __FUNCT__ 1076 #define __FUNCT__ "DMSetNullSpaceConstructor" 1077 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1078 { 1079 PetscFunctionBegin; 1080 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1081 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1082 dm->nullspaceConstructors[field] = nullsp; 1083 PetscFunctionReturn(0); 1084 } 1085 1086 #undef __FUNCT__ 1087 #define __FUNCT__ "DMCreateFieldIS" 1088 /*@C 1089 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1090 1091 Not collective 1092 1093 Input Parameter: 1094 . dm - the DM object 1095 1096 Output Parameters: 1097 + numFields - The number of fields (or NULL if not requested) 1098 . fieldNames - The name for each field (or NULL if not requested) 1099 - fields - The global indices for each field (or NULL if not requested) 1100 1101 Level: intermediate 1102 1103 Notes: 1104 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1105 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1106 PetscFree(). 1107 1108 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1109 @*/ 1110 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1111 { 1112 PetscSection section, sectionGlobal; 1113 PetscErrorCode ierr; 1114 1115 PetscFunctionBegin; 1116 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1117 if (numFields) { 1118 PetscValidPointer(numFields,2); 1119 *numFields = 0; 1120 } 1121 if (fieldNames) { 1122 PetscValidPointer(fieldNames,3); 1123 *fieldNames = NULL; 1124 } 1125 if (fields) { 1126 PetscValidPointer(fields,4); 1127 *fields = NULL; 1128 } 1129 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1130 if (section) { 1131 PetscInt *fieldSizes, **fieldIndices; 1132 PetscInt nF, f, pStart, pEnd, p; 1133 1134 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1135 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1136 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1137 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1138 for (f = 0; f < nF; ++f) { 1139 fieldSizes[f] = 0; 1140 } 1141 for (p = pStart; p < pEnd; ++p) { 1142 PetscInt gdof; 1143 1144 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1145 if (gdof > 0) { 1146 for (f = 0; f < nF; ++f) { 1147 PetscInt fdof, fcdof; 1148 1149 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1150 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1151 fieldSizes[f] += fdof-fcdof; 1152 } 1153 } 1154 } 1155 for (f = 0; f < nF; ++f) { 1156 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1157 fieldSizes[f] = 0; 1158 } 1159 for (p = pStart; p < pEnd; ++p) { 1160 PetscInt gdof, goff; 
1161 1162 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1163 if (gdof > 0) { 1164 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1165 for (f = 0; f < nF; ++f) { 1166 PetscInt fdof, fcdof, fc; 1167 1168 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1169 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1170 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1171 fieldIndices[f][fieldSizes[f]] = goff++; 1172 } 1173 } 1174 } 1175 } 1176 if (numFields) *numFields = nF; 1177 if (fieldNames) { 1178 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1179 for (f = 0; f < nF; ++f) { 1180 const char *fieldName; 1181 1182 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1183 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1184 } 1185 } 1186 if (fields) { 1187 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1188 for (f = 0; f < nF; ++f) { 1189 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1190 } 1191 } 1192 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1193 } else if (dm->ops->createfieldis) { 1194 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1195 } 1196 PetscFunctionReturn(0); 1197 } 1198 1199 1200 #undef __FUNCT__ 1201 #define __FUNCT__ "DMCreateFieldDecomposition" 1202 /*@C 1203 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1204 corresponding to different fields: each IS contains the global indices of the dofs of the 1205 corresponding field. The optional list of DMs define the DM for each subproblem. 1206 Generalizes DMCreateFieldIS(). 1207 1208 Not collective 1209 1210 Input Parameter: 1211 . 
dm - the DM object 1212 1213 Output Parameters: 1214 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1215 . namelist - The name for each field (or NULL if not requested) 1216 . islist - The global indices for each field (or NULL if not requested) 1217 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1218 1219 Level: intermediate 1220 1221 Notes: 1222 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1223 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1224 and all of the arrays should be freed with PetscFree(). 1225 1226 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1227 @*/ 1228 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1229 { 1230 PetscErrorCode ierr; 1231 1232 PetscFunctionBegin; 1233 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1234 if (len) { 1235 PetscValidPointer(len,2); 1236 *len = 0; 1237 } 1238 if (namelist) { 1239 PetscValidPointer(namelist,3); 1240 *namelist = 0; 1241 } 1242 if (islist) { 1243 PetscValidPointer(islist,4); 1244 *islist = 0; 1245 } 1246 if (dmlist) { 1247 PetscValidPointer(dmlist,5); 1248 *dmlist = 0; 1249 } 1250 /* 1251 Is it a good idea to apply the following check across all impls? 1252 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1253 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1254 */ 1255 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1256 if (!dm->ops->createfielddecomposition) { 1257 PetscSection section; 1258 PetscInt numFields, f; 1259 1260 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1261 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1262 if (section && numFields && dm->ops->createsubdm) { 1263 *len = numFields; 1264 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1265 for (f = 0; f < numFields; ++f) { 1266 const char *fieldName; 1267 1268 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1269 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1270 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1271 } 1272 } else { 1273 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1274 /* By default there are no DMs associated with subproblems. */ 1275 if (dmlist) *dmlist = NULL; 1276 } 1277 } else { 1278 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1279 } 1280 PetscFunctionReturn(0); 1281 } 1282 1283 #undef __FUNCT__ 1284 #define __FUNCT__ "DMCreateSubDM" 1285 /*@C 1286 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1287 The fields are defined by DMCreateFieldIS(). 1288 1289 Not collective 1290 1291 Input Parameters: 1292 + dm - the DM object 1293 . numFields - number of fields in this subproblem 1294 - len - The number of subproblems in the decomposition (or NULL if not requested) 1295 1296 Output Parameters: 1297 . 
is - The global indices for the subproblem 1298 - dm - The DM for the subproblem 1299 1300 Level: intermediate 1301 1302 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1303 @*/ 1304 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1305 { 1306 PetscErrorCode ierr; 1307 1308 PetscFunctionBegin; 1309 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1310 PetscValidPointer(fields,3); 1311 if (is) PetscValidPointer(is,4); 1312 if (subdm) PetscValidPointer(subdm,5); 1313 if (dm->ops->createsubdm) { 1314 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1315 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1316 PetscFunctionReturn(0); 1317 } 1318 1319 1320 #undef __FUNCT__ 1321 #define __FUNCT__ "DMCreateDomainDecomposition" 1322 /*@C 1323 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1324 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1325 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1326 define a nonoverlapping covering, while outer subdomains can overlap. 1327 The optional list of DMs define the DM for each subproblem. 1328 1329 Not collective 1330 1331 Input Parameter: 1332 . dm - the DM object 1333 1334 Output Parameters: 1335 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1336 . namelist - The name for each subdomain (or NULL if not requested) 1337 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1338 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1339 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1340 1341 Level: intermediate 1342 1343 Notes: 1344 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1345 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1346 and all of the arrays should be freed with PetscFree(). 1347 1348 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1349 @*/ 1350 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1351 { 1352 PetscErrorCode ierr; 1353 DMSubDomainHookLink link; 1354 PetscInt i,l; 1355 1356 PetscFunctionBegin; 1357 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1358 if (len) {PetscValidPointer(len,2); *len = 0;} 1359 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1360 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1361 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1362 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1363 /* 1364 Is it a good idea to apply the following check across all impls? 1365 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1366 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1367 */ 1368 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1369 if (dm->ops->createdomaindecomposition) { 1370 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1371 /* copy subdomain hooks and context over to the subdomain DMs */ 1372 if (dmlist) { 1373 for (i = 0; i < l; i++) { 1374 for (link=dm->subdomainhook; link; link=link->next) { 1375 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1376 } 1377 (*dmlist)[i]->ctx = dm->ctx; 1378 } 1379 } 1380 if (len) *len = l; 1381 } 1382 PetscFunctionReturn(0); 1383 } 1384 1385 1386 #undef __FUNCT__ 1387 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1388 /*@C 1389 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1390 1391 Not collective 1392 1393 Input Parameters: 1394 + dm - the DM object 1395 . n - the number of subdomain scatters 1396 - subdms - the local subdomains 1397 1398 Output Parameters: 1399 + n - the number of scatters returned 1400 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1401 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1402 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1403 1404 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1405 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1406 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1407 solution and residual data. 
1408 1409 Level: developer 1410 1411 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1412 @*/ 1413 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1414 { 1415 PetscErrorCode ierr; 1416 1417 PetscFunctionBegin; 1418 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1419 PetscValidPointer(subdms,3); 1420 if (dm->ops->createddscatters) { 1421 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1422 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1423 PetscFunctionReturn(0); 1424 } 1425 1426 #undef __FUNCT__ 1427 #define __FUNCT__ "DMRefine" 1428 /*@ 1429 DMRefine - Refines a DM object 1430 1431 Collective on DM 1432 1433 Input Parameter: 1434 + dm - the DM object 1435 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1436 1437 Output Parameter: 1438 . 
dmf - the refined DM, or NULL 1439 1440 Note: If no refinement was done, the return value is NULL 1441 1442 Level: developer 1443 1444 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1445 @*/ 1446 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1447 { 1448 PetscErrorCode ierr; 1449 DMRefineHookLink link; 1450 1451 PetscFunctionBegin; 1452 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1453 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1454 if (*dmf) { 1455 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1456 1457 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1458 1459 (*dmf)->ctx = dm->ctx; 1460 (*dmf)->leveldown = dm->leveldown; 1461 (*dmf)->levelup = dm->levelup + 1; 1462 1463 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1464 for (link=dm->refinehook; link; link=link->next) { 1465 if (link->refinehook) { 1466 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1467 } 1468 } 1469 } 1470 PetscFunctionReturn(0); 1471 } 1472 1473 #undef __FUNCT__ 1474 #define __FUNCT__ "DMRefineHookAdd" 1475 /*@C 1476 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1477 1478 Logically Collective 1479 1480 Input Arguments: 1481 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1482 . refinehook - function to run when setting up a coarser level 1483 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1484 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1485 1486 Calling sequence of refinehook: 1487 $ refinehook(DM coarse,DM fine,void *ctx); 1488 1489 + coarse - coarse level DM 1490 . 
fine - fine level DM to interpolate problem to 1491 - ctx - optional user-defined function context 1492 1493 Calling sequence for interphook: 1494 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1495 1496 + coarse - coarse level DM 1497 . interp - matrix interpolating a coarse-level solution to the finer grid 1498 . fine - fine level DM to update 1499 - ctx - optional user-defined function context 1500 1501 Level: advanced 1502 1503 Notes: 1504 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1505 1506 If this function is called multiple times, the hooks will be run in the order they are added. 1507 1508 This function is currently not available from Fortran. 1509 1510 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1511 @*/ 1512 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1513 { 1514 PetscErrorCode ierr; 1515 DMRefineHookLink link,*p; 1516 1517 PetscFunctionBegin; 1518 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1519 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1520 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1521 link->refinehook = refinehook; 1522 link->interphook = interphook; 1523 link->ctx = ctx; 1524 link->next = NULL; 1525 *p = link; 1526 PetscFunctionReturn(0); 1527 } 1528 1529 #undef __FUNCT__ 1530 #define __FUNCT__ "DMInterpolate" 1531 /*@ 1532 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1533 1534 Collective if any hooks are 1535 1536 Input Arguments: 1537 + coarse - coarser DM to use as a base 1538 . 
restrct - interpolation matrix, apply using MatInterpolate() 1539 - fine - finer DM to update 1540 1541 Level: developer 1542 1543 .seealso: DMRefineHookAdd(), MatInterpolate() 1544 @*/ 1545 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1546 { 1547 PetscErrorCode ierr; 1548 DMRefineHookLink link; 1549 1550 PetscFunctionBegin; 1551 for (link=fine->refinehook; link; link=link->next) { 1552 if (link->interphook) { 1553 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1554 } 1555 } 1556 PetscFunctionReturn(0); 1557 } 1558 1559 #undef __FUNCT__ 1560 #define __FUNCT__ "DMGetRefineLevel" 1561 /*@ 1562 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1563 1564 Not Collective 1565 1566 Input Parameter: 1567 . dm - the DM object 1568 1569 Output Parameter: 1570 . level - number of refinements 1571 1572 Level: developer 1573 1574 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1575 1576 @*/ 1577 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1578 { 1579 PetscFunctionBegin; 1580 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1581 *level = dm->levelup; 1582 PetscFunctionReturn(0); 1583 } 1584 1585 #undef __FUNCT__ 1586 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1587 /*@C 1588 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1589 1590 Logically Collective 1591 1592 Input Arguments: 1593 + dm - the DM 1594 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1595 . endhook - function to run after DMGlobalToLocalEnd() has completed 1596 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1597 1598 Calling sequence for beginhook: 1599 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1600 1601 + dm - global DM 1602 . g - global vector 1603 . mode - mode 1604 . 
l - local vector 1605 - ctx - optional user-defined function context 1606 1607 1608 Calling sequence for endhook: 1609 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1610 1611 + global - global DM 1612 - ctx - optional user-defined function context 1613 1614 Level: advanced 1615 1616 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1617 @*/ 1618 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1619 { 1620 PetscErrorCode ierr; 1621 DMGlobalToLocalHookLink link,*p; 1622 1623 PetscFunctionBegin; 1624 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1625 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1626 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1627 link->beginhook = beginhook; 1628 link->endhook = endhook; 1629 link->ctx = ctx; 1630 link->next = NULL; 1631 *p = link; 1632 PetscFunctionReturn(0); 1633 } 1634 1635 #undef __FUNCT__ 1636 #define __FUNCT__ "DMGlobalToLocalBegin" 1637 /*@ 1638 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1639 1640 Neighbor-wise Collective on DM 1641 1642 Input Parameters: 1643 + dm - the DM object 1644 . g - the global vector 1645 . 
mode - INSERT_VALUES or ADD_VALUES 1646 - l - the local vector 1647 1648 1649 Level: beginner 1650 1651 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1652 1653 @*/ 1654 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1655 { 1656 PetscSF sf; 1657 PetscErrorCode ierr; 1658 DMGlobalToLocalHookLink link; 1659 1660 PetscFunctionBegin; 1661 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1662 for (link=dm->gtolhook; link; link=link->next) { 1663 if (link->beginhook) { 1664 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1665 } 1666 } 1667 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1668 if (sf) { 1669 PetscScalar *lArray, *gArray; 1670 1671 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1672 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1673 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1674 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1675 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1676 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1677 } else { 1678 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1679 } 1680 PetscFunctionReturn(0); 1681 } 1682 1683 #undef __FUNCT__ 1684 #define __FUNCT__ "DMGlobalToLocalEnd" 1685 /*@ 1686 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1687 1688 Neighbor-wise Collective on DM 1689 1690 Input Parameters: 1691 + dm - the DM object 1692 . g - the global vector 1693 . 
mode - INSERT_VALUES or ADD_VALUES 1694 - l - the local vector 1695 1696 1697 Level: beginner 1698 1699 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1700 1701 @*/ 1702 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1703 { 1704 PetscSF sf; 1705 PetscErrorCode ierr; 1706 PetscScalar *lArray, *gArray; 1707 DMGlobalToLocalHookLink link; 1708 1709 PetscFunctionBegin; 1710 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1711 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1712 if (sf) { 1713 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1714 1715 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1716 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1717 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1718 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1719 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1720 } else { 1721 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1722 } 1723 for (link=dm->gtolhook; link; link=link->next) { 1724 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1725 } 1726 PetscFunctionReturn(0); 1727 } 1728 1729 #undef __FUNCT__ 1730 #define __FUNCT__ "DMLocalToGlobalBegin" 1731 /*@ 1732 DMLocalToGlobalBegin - updates global vectors from local vectors 1733 1734 Neighbor-wise Collective on DM 1735 1736 Input Parameters: 1737 + dm - the DM object 1738 . l - the local vector 1739 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1740 base point. 1741 - - the global vector 1742 1743 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1744 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1745 global array to the final global array with VecAXPY(). 1746 1747 Level: beginner 1748 1749 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1750 1751 @*/ 1752 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1753 { 1754 PetscSF sf; 1755 PetscErrorCode ierr; 1756 1757 PetscFunctionBegin; 1758 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1759 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1760 if (sf) { 1761 MPI_Op op; 1762 PetscScalar *lArray, *gArray; 1763 1764 switch (mode) { 1765 case INSERT_VALUES: 1766 case INSERT_ALL_VALUES: 1767 op = MPIU_REPLACE; break; 1768 case ADD_VALUES: 1769 case ADD_ALL_VALUES: 1770 op = MPI_SUM; break; 1771 default: 1772 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1773 } 1774 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1775 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1776 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1777 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1778 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1779 } else { 1780 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1781 } 1782 PetscFunctionReturn(0); 1783 } 1784 1785 #undef __FUNCT__ 1786 #define __FUNCT__ "DMLocalToGlobalEnd" 1787 /*@ 1788 DMLocalToGlobalEnd - updates global vectors from local vectors 1789 1790 Neighbor-wise Collective on DM 1791 1792 Input Parameters: 1793 + dm - the DM object 1794 . l - the local vector 1795 . 
mode - INSERT_VALUES or ADD_VALUES 1796 - g - the global vector 1797 1798 1799 Level: beginner 1800 1801 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1802 1803 @*/ 1804 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1805 { 1806 PetscSF sf; 1807 PetscErrorCode ierr; 1808 1809 PetscFunctionBegin; 1810 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1811 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1812 if (sf) { 1813 MPI_Op op; 1814 PetscScalar *lArray, *gArray; 1815 1816 switch (mode) { 1817 case INSERT_VALUES: 1818 case INSERT_ALL_VALUES: 1819 op = MPIU_REPLACE; break; 1820 case ADD_VALUES: 1821 case ADD_ALL_VALUES: 1822 op = MPI_SUM; break; 1823 default: 1824 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1825 } 1826 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1827 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1828 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1829 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1830 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1831 } else { 1832 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1833 } 1834 PetscFunctionReturn(0); 1835 } 1836 1837 #undef __FUNCT__ 1838 #define __FUNCT__ "DMLocalToLocalBegin" 1839 /*@ 1840 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1841 that contain irrelevant values) to another local vector where the ghost 1842 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1843 1844 Neighbor-wise Collective on DM and Vec 1845 1846 Input Parameters: 1847 + dm - the DM object 1848 . g - the original local vector 1849 - mode - one of INSERT_VALUES or ADD_VALUES 1850 1851 Output Parameter: 1852 . 
l - the local vector with correct ghost values 1853 1854 Level: intermediate 1855 1856 Notes: 1857 The local vectors used here need not be the same as those 1858 obtained from DMCreateLocalVector(), BUT they 1859 must have the same parallel data layout; they could, for example, be 1860 obtained with VecDuplicate() from the DM originating vectors. 1861 1862 .keywords: DM, local-to-local, begin 1863 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1864 1865 @*/ 1866 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1867 { 1868 PetscErrorCode ierr; 1869 1870 PetscFunctionBegin; 1871 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1872 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1873 PetscFunctionReturn(0); 1874 } 1875 1876 #undef __FUNCT__ 1877 #define __FUNCT__ "DMLocalToLocalEnd" 1878 /*@ 1879 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1880 that contain irrelevant values) to another local vector where the ghost 1881 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1882 1883 Neighbor-wise Collective on DM and Vec 1884 1885 Input Parameters: 1886 + da - the DM object 1887 . g - the original local vector 1888 - mode - one of INSERT_VALUES or ADD_VALUES 1889 1890 Output Parameter: 1891 . l - the local vector with correct ghost values 1892 1893 Level: intermediate 1894 1895 Notes: 1896 The local vectors used here need not be the same as those 1897 obtained from DMCreateLocalVector(), BUT they 1898 must have the same parallel data layout; they could, for example, be 1899 obtained with VecDuplicate() from the DM originating vectors. 
1900 1901 .keywords: DM, local-to-local, end 1902 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1903 1904 @*/ 1905 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1906 { 1907 PetscErrorCode ierr; 1908 1909 PetscFunctionBegin; 1910 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1911 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1912 PetscFunctionReturn(0); 1913 } 1914 1915 1916 #undef __FUNCT__ 1917 #define __FUNCT__ "DMCoarsen" 1918 /*@ 1919 DMCoarsen - Coarsens a DM object 1920 1921 Collective on DM 1922 1923 Input Parameter: 1924 + dm - the DM object 1925 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1926 1927 Output Parameter: 1928 . dmc - the coarsened DM 1929 1930 Level: developer 1931 1932 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1933 1934 @*/ 1935 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1936 { 1937 PetscErrorCode ierr; 1938 DMCoarsenHookLink link; 1939 1940 PetscFunctionBegin; 1941 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1942 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1943 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1944 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1945 (*dmc)->ctx = dm->ctx; 1946 (*dmc)->levelup = dm->levelup; 1947 (*dmc)->leveldown = dm->leveldown + 1; 1948 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1949 for (link=dm->coarsenhook; link; link=link->next) { 1950 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1951 } 1952 PetscFunctionReturn(0); 1953 } 1954 1955 #undef __FUNCT__ 1956 #define __FUNCT__ "DMCoarsenHookAdd" 1957 /*@C 1958 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid 1959 1960 Logically Collective 1961 1962 Input Arguments: 1963 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1964 . coarsenhook - function to run when setting up a coarser level 1965 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1966 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1967 1968 Calling sequence of coarsenhook: 1969 $ coarsenhook(DM fine,DM coarse,void *ctx); 1970 1971 + fine - fine level DM 1972 . coarse - coarse level DM to restrict problem to 1973 - ctx - optional user-defined function context 1974 1975 Calling sequence for restricthook: 1976 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1977 1978 + fine - fine level DM 1979 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1980 . rscale - scaling vector for restriction 1981 . inject - matrix restricting by injection 1982 . coarse - coarse level DM to update 1983 - ctx - optional user-defined function context 1984 1985 Level: advanced 1986 1987 Notes: 1988 This function is only needed if auxiliary data needs to be set up on coarse grids. 1989 1990 If this function is called multiple times, the hooks will be run in the order they are added. 1991 1992 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1993 extract the finest level information from its context (instead of from the SNES). 1994 1995 This function is currently not available from Fortran. 
1996 1997 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1998 @*/ 1999 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 2000 { 2001 PetscErrorCode ierr; 2002 DMCoarsenHookLink link,*p; 2003 2004 PetscFunctionBegin; 2005 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 2006 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2007 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 2008 link->coarsenhook = coarsenhook; 2009 link->restricthook = restricthook; 2010 link->ctx = ctx; 2011 link->next = NULL; 2012 *p = link; 2013 PetscFunctionReturn(0); 2014 } 2015 2016 #undef __FUNCT__ 2017 #define __FUNCT__ "DMRestrict" 2018 /*@ 2019 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 2020 2021 Collective if any hooks are 2022 2023 Input Arguments: 2024 + fine - finer DM to use as a base 2025 . restrct - restriction matrix, apply using MatRestrict() 2026 . 
inject - injection matrix, also use MatRestrict() 2027 - coarse - coarer DM to update 2028 2029 Level: developer 2030 2031 .seealso: DMCoarsenHookAdd(), MatRestrict() 2032 @*/ 2033 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2034 { 2035 PetscErrorCode ierr; 2036 DMCoarsenHookLink link; 2037 2038 PetscFunctionBegin; 2039 for (link=fine->coarsenhook; link; link=link->next) { 2040 if (link->restricthook) { 2041 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2042 } 2043 } 2044 PetscFunctionReturn(0); 2045 } 2046 2047 #undef __FUNCT__ 2048 #define __FUNCT__ "DMSubDomainHookAdd" 2049 /*@C 2050 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2051 2052 Logically Collective 2053 2054 Input Arguments: 2055 + global - global DM 2056 . ddhook - function to run to pass data to the decomposition DM upon its creation 2057 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2058 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2059 2060 2061 Calling sequence for ddhook: 2062 $ ddhook(DM global,DM block,void *ctx) 2063 2064 + global - global DM 2065 . block - block DM 2066 - ctx - optional user-defined function context 2067 2068 Calling sequence for restricthook: 2069 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2070 2071 + global - global DM 2072 . out - scatter to the outer (with ghost and overlap points) block vector 2073 . in - scatter to block vector values only owned locally 2074 . block - block DM 2075 - ctx - optional user-defined function context 2076 2077 Level: advanced 2078 2079 Notes: 2080 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2081 2082 If this function is called multiple times, the hooks will be run in the order they are added. 
2083 2084 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2085 extract the global information from its context (instead of from the SNES). 2086 2087 This function is currently not available from Fortran. 2088 2089 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2090 @*/ 2091 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 2092 { 2093 PetscErrorCode ierr; 2094 DMSubDomainHookLink link,*p; 2095 2096 PetscFunctionBegin; 2097 PetscValidHeaderSpecific(global,DM_CLASSID,1); 2098 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2099 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 2100 link->restricthook = restricthook; 2101 link->ddhook = ddhook; 2102 link->ctx = ctx; 2103 link->next = NULL; 2104 *p = link; 2105 PetscFunctionReturn(0); 2106 } 2107 2108 #undef __FUNCT__ 2109 #define __FUNCT__ "DMSubDomainRestrict" 2110 /*@ 2111 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 2112 2113 Collective if any hooks are 2114 2115 Input Arguments: 2116 + fine - finer DM to use as a base 2117 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 2118 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2119 - coarse - coarer DM to update 2120 2121 Level: developer 2122 2123 .seealso: DMCoarsenHookAdd(), MatRestrict() 2124 @*/ 2125 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2126 { 2127 PetscErrorCode ierr; 2128 DMSubDomainHookLink link; 2129 2130 PetscFunctionBegin; 2131 for (link=global->subdomainhook; link; link=link->next) { 2132 if (link->restricthook) { 2133 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2134 } 2135 } 2136 PetscFunctionReturn(0); 2137 } 2138 2139 #undef __FUNCT__ 2140 #define __FUNCT__ "DMGetCoarsenLevel" 2141 /*@ 2142 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2143 2144 Not Collective 2145 2146 Input Parameter: 2147 . dm - the DM object 2148 2149 Output Parameter: 2150 . level - number of coarsenings 2151 2152 Level: developer 2153 2154 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2155 2156 @*/ 2157 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2158 { 2159 PetscFunctionBegin; 2160 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2161 *level = dm->leveldown; 2162 PetscFunctionReturn(0); 2163 } 2164 2165 2166 2167 #undef __FUNCT__ 2168 #define __FUNCT__ "DMRefineHierarchy" 2169 /*@C 2170 DMRefineHierarchy - Refines a DM object, all levels at once 2171 2172 Collective on DM 2173 2174 Input Parameter: 2175 + dm - the DM object 2176 - nlevels - the number of levels of refinement 2177 2178 Output Parameter: 2179 . 
dmf - the refined DM hierarchy 2180 2181 Level: developer 2182 2183 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2184 2185 @*/ 2186 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2187 { 2188 PetscErrorCode ierr; 2189 2190 PetscFunctionBegin; 2191 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2192 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2193 if (nlevels == 0) PetscFunctionReturn(0); 2194 if (dm->ops->refinehierarchy) { 2195 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2196 } else if (dm->ops->refine) { 2197 PetscInt i; 2198 2199 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2200 for (i=1; i<nlevels; i++) { 2201 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2202 } 2203 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2204 PetscFunctionReturn(0); 2205 } 2206 2207 #undef __FUNCT__ 2208 #define __FUNCT__ "DMCoarsenHierarchy" 2209 /*@C 2210 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2211 2212 Collective on DM 2213 2214 Input Parameter: 2215 + dm - the DM object 2216 - nlevels - the number of levels of coarsening 2217 2218 Output Parameter: 2219 . 
dmc - the coarsened DM hierarchy 2220 2221 Level: developer 2222 2223 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2224 2225 @*/ 2226 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2227 { 2228 PetscErrorCode ierr; 2229 2230 PetscFunctionBegin; 2231 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2232 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2233 if (nlevels == 0) PetscFunctionReturn(0); 2234 PetscValidPointer(dmc,3); 2235 if (dm->ops->coarsenhierarchy) { 2236 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2237 } else if (dm->ops->coarsen) { 2238 PetscInt i; 2239 2240 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2241 for (i=1; i<nlevels; i++) { 2242 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2243 } 2244 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2245 PetscFunctionReturn(0); 2246 } 2247 2248 #undef __FUNCT__ 2249 #define __FUNCT__ "DMCreateAggregates" 2250 /*@ 2251 DMCreateAggregates - Gets the aggregates that map between 2252 grids associated with two DMs. 2253 2254 Collective on DM 2255 2256 Input Parameters: 2257 + dmc - the coarse grid DM 2258 - dmf - the fine grid DM 2259 2260 Output Parameters: 2261 . 
rest - the restriction matrix (transpose of the projection matrix) 2262 2263 Level: intermediate 2264 2265 .keywords: interpolation, restriction, multigrid 2266 2267 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2268 @*/ 2269 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2270 { 2271 PetscErrorCode ierr; 2272 2273 PetscFunctionBegin; 2274 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2275 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2276 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2277 PetscFunctionReturn(0); 2278 } 2279 2280 #undef __FUNCT__ 2281 #define __FUNCT__ "DMSetApplicationContextDestroy" 2282 /*@C 2283 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2284 2285 Not Collective 2286 2287 Input Parameters: 2288 + dm - the DM object 2289 - destroy - the destroy function 2290 2291 Level: intermediate 2292 2293 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2294 2295 @*/ 2296 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2297 { 2298 PetscFunctionBegin; 2299 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2300 dm->ctxdestroy = destroy; 2301 PetscFunctionReturn(0); 2302 } 2303 2304 #undef __FUNCT__ 2305 #define __FUNCT__ "DMSetApplicationContext" 2306 /*@ 2307 DMSetApplicationContext - Set a user context into a DM object 2308 2309 Not Collective 2310 2311 Input Parameters: 2312 + dm - the DM object 2313 - ctx - the user context 2314 2315 Level: intermediate 2316 2317 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2318 2319 @*/ 2320 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2321 { 2322 PetscFunctionBegin; 2323 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2324 dm->ctx = ctx; 2325 PetscFunctionReturn(0); 2326 } 2327 
2328 #undef __FUNCT__ 2329 #define __FUNCT__ "DMGetApplicationContext" 2330 /*@ 2331 DMGetApplicationContext - Gets a user context from a DM object 2332 2333 Not Collective 2334 2335 Input Parameter: 2336 . dm - the DM object 2337 2338 Output Parameter: 2339 . ctx - the user context 2340 2341 Level: intermediate 2342 2343 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2344 2345 @*/ 2346 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2347 { 2348 PetscFunctionBegin; 2349 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2350 *(void**)ctx = dm->ctx; 2351 PetscFunctionReturn(0); 2352 } 2353 2354 #undef __FUNCT__ 2355 #define __FUNCT__ "DMSetVariableBounds" 2356 /*@C 2357 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2358 2359 Logically Collective on DM 2360 2361 Input Parameter: 2362 + dm - the DM object 2363 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2364 2365 Level: intermediate 2366 2367 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2368 DMSetJacobian() 2369 2370 @*/ 2371 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2372 { 2373 PetscFunctionBegin; 2374 dm->ops->computevariablebounds = f; 2375 PetscFunctionReturn(0); 2376 } 2377 2378 #undef __FUNCT__ 2379 #define __FUNCT__ "DMHasVariableBounds" 2380 /*@ 2381 DMHasVariableBounds - does the DM object have a variable bounds function? 2382 2383 Not Collective 2384 2385 Input Parameter: 2386 . dm - the DM object to destroy 2387 2388 Output Parameter: 2389 . 
flg - PETSC_TRUE if the variable bounds function exists 2390 2391 Level: developer 2392 2393 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2394 2395 @*/ 2396 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2397 { 2398 PetscFunctionBegin; 2399 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2400 PetscFunctionReturn(0); 2401 } 2402 2403 #undef __FUNCT__ 2404 #define __FUNCT__ "DMComputeVariableBounds" 2405 /*@C 2406 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2407 2408 Logically Collective on DM 2409 2410 Input Parameters: 2411 + dm - the DM object to destroy 2412 - x - current solution at which the bounds are computed 2413 2414 Output parameters: 2415 + xl - lower bound 2416 - xu - upper bound 2417 2418 Level: intermediate 2419 2420 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2421 2422 @*/ 2423 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2424 { 2425 PetscErrorCode ierr; 2426 2427 PetscFunctionBegin; 2428 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2429 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2430 if (dm->ops->computevariablebounds) { 2431 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2432 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2433 PetscFunctionReturn(0); 2434 } 2435 2436 #undef __FUNCT__ 2437 #define __FUNCT__ "DMHasColoring" 2438 /*@ 2439 DMHasColoring - does the DM object have a method of providing a coloring? 2440 2441 Not Collective 2442 2443 Input Parameter: 2444 . dm - the DM object 2445 2446 Output Parameter: 2447 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2448 2449 Level: developer 2450 2451 .seealso DMHasFunction(), DMCreateColoring() 2452 2453 @*/ 2454 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2455 { 2456 PetscFunctionBegin; 2457 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2458 PetscFunctionReturn(0); 2459 } 2460 2461 #undef __FUNCT__ 2462 #define __FUNCT__ "DMSetVec" 2463 /*@C 2464 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2465 2466 Collective on DM 2467 2468 Input Parameter: 2469 + dm - the DM object 2470 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2471 2472 Level: developer 2473 2474 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2475 2476 @*/ 2477 PetscErrorCode DMSetVec(DM dm,Vec x) 2478 { 2479 PetscErrorCode ierr; 2480 2481 PetscFunctionBegin; 2482 if (x) { 2483 if (!dm->x) { 2484 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2485 } 2486 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2487 } else if (dm->x) { 2488 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2489 } 2490 PetscFunctionReturn(0); 2491 } 2492 2493 PetscFunctionList DMList = NULL; 2494 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2495 2496 #undef __FUNCT__ 2497 #define __FUNCT__ "DMSetType" 2498 /*@C 2499 DMSetType - Builds a DM, for a particular DM implementation. 2500 2501 Collective on DM 2502 2503 Input Parameters: 2504 + dm - The DM object 2505 - method - The name of the DM type 2506 2507 Options Database Key: 2508 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2509 2510 Notes: 2511 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2512 2513 Level: intermediate 2514 2515 .keywords: DM, set, type 2516 .seealso: DMGetType(), DMCreate() 2517 @*/ 2518 PetscErrorCode DMSetType(DM dm, DMType method) 2519 { 2520 PetscErrorCode (*r)(DM); 2521 PetscBool match; 2522 PetscErrorCode ierr; 2523 2524 PetscFunctionBegin; 2525 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2526 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2527 if (match) PetscFunctionReturn(0); 2528 2529 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2530 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2531 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2532 2533 if (dm->ops->destroy) { 2534 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2535 dm->ops->destroy = NULL; 2536 } 2537 ierr = (*r)(dm);CHKERRQ(ierr); 2538 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2539 PetscFunctionReturn(0); 2540 } 2541 2542 #undef __FUNCT__ 2543 #define __FUNCT__ "DMGetType" 2544 /*@C 2545 DMGetType - Gets the DM type name (as a string) from the DM. 2546 2547 Not Collective 2548 2549 Input Parameter: 2550 . dm - The DM 2551 2552 Output Parameter: 2553 . type - The DM type name 2554 2555 Level: intermediate 2556 2557 .keywords: DM, get, type, name 2558 .seealso: DMSetType(), DMCreate() 2559 @*/ 2560 PetscErrorCode DMGetType(DM dm, DMType *type) 2561 { 2562 PetscErrorCode ierr; 2563 2564 PetscFunctionBegin; 2565 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2566 PetscValidCharPointer(type,2); 2567 if (!DMRegisterAllCalled) { 2568 ierr = DMRegisterAll();CHKERRQ(ierr); 2569 } 2570 *type = ((PetscObject)dm)->type_name; 2571 PetscFunctionReturn(0); 2572 } 2573 2574 #undef __FUNCT__ 2575 #define __FUNCT__ "DMConvert" 2576 /*@C 2577 DMConvert - Converts a DM to another DM, either of the same or different type. 
2578 2579 Collective on DM 2580 2581 Input Parameters: 2582 + dm - the DM 2583 - newtype - new DM type (use "same" for the same type) 2584 2585 Output Parameter: 2586 . M - pointer to new DM 2587 2588 Notes: 2589 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2590 the MPI communicator of the generated DM is always the same as the communicator 2591 of the input DM. 2592 2593 Level: intermediate 2594 2595 .seealso: DMCreate() 2596 @*/ 2597 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2598 { 2599 DM B; 2600 char convname[256]; 2601 PetscBool sametype, issame; 2602 PetscErrorCode ierr; 2603 2604 PetscFunctionBegin; 2605 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2606 PetscValidType(dm,1); 2607 PetscValidPointer(M,3); 2608 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2609 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2610 { 2611 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2612 2613 /* 2614 Order of precedence: 2615 1) See if a specialized converter is known to the current DM. 2616 2) See if a specialized converter is known to the desired DM class. 2617 3) See if a good general converter is registered for the desired class 2618 4) See if a good general converter is known for the current matrix. 2619 5) Use a really basic converter. 2620 */ 2621 2622 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2623 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2624 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2625 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2626 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2627 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2628 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2629 if (conv) goto foundconv; 2630 2631 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2632 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2633 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2634 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2635 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2636 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2637 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2638 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2639 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2640 if (conv) { 2641 ierr = DMDestroy(&B);CHKERRQ(ierr); 2642 goto foundconv; 2643 } 2644 2645 #if 0 2646 /* 3) See if a good general converter is registered for the desired class */ 2647 conv = B->ops->convertfrom; 2648 ierr = DMDestroy(&B);CHKERRQ(ierr); 2649 if (conv) goto foundconv; 2650 2651 /* 4) See if a good general converter is known for the current matrix */ 2652 if (dm->ops->convert) { 2653 conv = dm->ops->convert; 2654 } 2655 if (conv) goto foundconv; 2656 #endif 2657 2658 /* 5) Use a really basic converter. 
*/ 2659 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2660 2661 foundconv: 2662 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2663 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2664 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2665 } 2666 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2667 PetscFunctionReturn(0); 2668 } 2669 2670 /*--------------------------------------------------------------------------------------------------------------------*/ 2671 2672 #undef __FUNCT__ 2673 #define __FUNCT__ "DMRegister" 2674 /*@C 2675 DMRegister - Adds a new DM component implementation 2676 2677 Not Collective 2678 2679 Input Parameters: 2680 + name - The name of a new user-defined creation routine 2681 - create_func - The creation routine itself 2682 2683 Notes: 2684 DMRegister() may be called multiple times to add several user-defined DMs 2685 2686 2687 Sample usage: 2688 .vb 2689 DMRegister("my_da", MyDMCreate); 2690 .ve 2691 2692 Then, your DM type can be chosen with the procedural interface via 2693 .vb 2694 DMCreate(MPI_Comm, DM *); 2695 DMSetType(DM,"my_da"); 2696 .ve 2697 or at runtime via the option 2698 .vb 2699 -da_type my_da 2700 .ve 2701 2702 Level: advanced 2703 2704 .keywords: DM, register 2705 .seealso: DMRegisterAll(), DMRegisterDestroy() 2706 2707 @*/ 2708 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2709 { 2710 PetscErrorCode ierr; 2711 2712 PetscFunctionBegin; 2713 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2714 PetscFunctionReturn(0); 2715 } 2716 2717 #undef __FUNCT__ 2718 #define __FUNCT__ "DMLoad" 2719 /*@C 2720 DMLoad - Loads a DM that has been stored in binary with DMView(). 
2721 2722 Collective on PetscViewer 2723 2724 Input Parameters: 2725 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2726 some related function before a call to DMLoad(). 2727 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2728 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2729 2730 Level: intermediate 2731 2732 Notes: 2733 The type is determined by the data in the file, any type set into the DM before this call is ignored. 2734 2735 Notes for advanced users: 2736 Most users should not need to know the details of the binary storage 2737 format, since DMLoad() and DMView() completely hide these details. 2738 But for anyone who's interested, the standard binary matrix storage 2739 format is 2740 .vb 2741 has not yet been determined 2742 .ve 2743 2744 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2745 @*/ 2746 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2747 { 2748 PetscErrorCode ierr; 2749 PetscBool isbinary; 2750 PetscInt classid; 2751 char type[256]; 2752 2753 PetscFunctionBegin; 2754 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2755 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2756 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 2757 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 2758 2759 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 2760 if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file"); 2761 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 2762 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 2763 if (newdm->ops->load) { 2764 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2765 } 2766 PetscFunctionReturn(0); 2767 } 2768 2769 /******************************** FEM Support **********************************/ 2770 2771 
#undef __FUNCT__ 2772 #define __FUNCT__ "DMPrintCellVector" 2773 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) 2774 { 2775 PetscInt f; 2776 PetscErrorCode ierr; 2777 2778 PetscFunctionBegin; 2779 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2780 for (f = 0; f < len; ++f) { 2781 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2782 } 2783 PetscFunctionReturn(0); 2784 } 2785 2786 #undef __FUNCT__ 2787 #define __FUNCT__ "DMPrintCellMatrix" 2788 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) 2789 { 2790 PetscInt f, g; 2791 PetscErrorCode ierr; 2792 2793 PetscFunctionBegin; 2794 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2795 for (f = 0; f < rows; ++f) { 2796 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2797 for (g = 0; g < cols; ++g) { 2798 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2799 } 2800 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2801 } 2802 PetscFunctionReturn(0); 2803 } 2804 2805 #undef __FUNCT__ 2806 #define __FUNCT__ "DMPrintLocalVec" 2807 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X) 2808 { 2809 PetscMPIInt rank, numProcs; 2810 PetscInt p; 2811 PetscErrorCode ierr; 2812 2813 PetscFunctionBegin; 2814 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr); 2815 ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr); 2816 ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr); 2817 for (p = 0; p < numProcs; ++p) { 2818 if (p == rank) { 2819 Vec x; 2820 2821 ierr = VecDuplicate(X, &x);CHKERRQ(ierr); 2822 ierr = VecCopy(X, x);CHKERRQ(ierr); 2823 ierr = VecChop(x, tol);CHKERRQ(ierr); 2824 ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 
2825 ierr = VecDestroy(&x);CHKERRQ(ierr); 2826 ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr); 2827 } 2828 ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr); 2829 } 2830 PetscFunctionReturn(0); 2831 } 2832 2833 #undef __FUNCT__ 2834 #define __FUNCT__ "DMGetDefaultSection" 2835 /*@ 2836 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2837 2838 Input Parameter: 2839 . dm - The DM 2840 2841 Output Parameter: 2842 . section - The PetscSection 2843 2844 Level: intermediate 2845 2846 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2847 2848 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2849 @*/ 2850 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) 2851 { 2852 PetscFunctionBegin; 2853 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2854 PetscValidPointer(section, 2); 2855 *section = dm->defaultSection; 2856 PetscFunctionReturn(0); 2857 } 2858 2859 #undef __FUNCT__ 2860 #define __FUNCT__ "DMSetDefaultSection" 2861 /*@ 2862 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 
2863 2864 Input Parameters: 2865 + dm - The DM 2866 - section - The PetscSection 2867 2868 Level: intermediate 2869 2870 Note: Any existing Section will be destroyed 2871 2872 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2873 @*/ 2874 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) 2875 { 2876 PetscInt numFields; 2877 PetscInt f; 2878 PetscErrorCode ierr; 2879 2880 PetscFunctionBegin; 2881 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2882 PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2883 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2884 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2885 dm->defaultSection = section; 2886 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 2887 if (numFields) { 2888 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 2889 for (f = 0; f < numFields; ++f) { 2890 const char *name; 2891 2892 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 2893 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 2894 } 2895 } 2896 /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */ 2897 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2898 PetscFunctionReturn(0); 2899 } 2900 2901 #undef __FUNCT__ 2902 #define __FUNCT__ "DMGetDefaultGlobalSection" 2903 /*@ 2904 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2905 2906 Collective on DM 2907 2908 Input Parameter: 2909 . dm - The DM 2910 2911 Output Parameter: 2912 . section - The PetscSection 2913 2914 Level: intermediate 2915 2916 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2917 2918 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2919 @*/ 2920 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) 2921 { 2922 PetscErrorCode ierr; 2923 2924 PetscFunctionBegin; 2925 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2926 PetscValidPointer(section, 2); 2927 if (!dm->defaultGlobalSection) { 2928 if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 2929 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 2930 ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr); 2931 ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr); 2932 } 2933 *section = dm->defaultGlobalSection; 2934 PetscFunctionReturn(0); 2935 } 2936 2937 #undef __FUNCT__ 2938 #define __FUNCT__ "DMSetDefaultGlobalSection" 2939 /*@ 2940 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 
2941 2942 Input Parameters: 2943 + dm - The DM 2944 - section - The PetscSection, or NULL 2945 2946 Level: intermediate 2947 2948 Note: Any existing Section will be destroyed 2949 2950 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2951 @*/ 2952 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2953 { 2954 PetscErrorCode ierr; 2955 2956 PetscFunctionBegin; 2957 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2958 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2959 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2960 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2961 dm->defaultGlobalSection = section; 2962 PetscFunctionReturn(0); 2963 } 2964 2965 #undef __FUNCT__ 2966 #define __FUNCT__ "DMGetDefaultSF" 2967 /*@ 2968 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2969 it is created from the default PetscSection layouts in the DM. 2970 2971 Input Parameter: 2972 . dm - The DM 2973 2974 Output Parameter: 2975 . sf - The PetscSF 2976 2977 Level: intermediate 2978 2979 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2980 2981 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2982 @*/ 2983 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2984 { 2985 PetscInt nroots; 2986 PetscErrorCode ierr; 2987 2988 PetscFunctionBegin; 2989 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2990 PetscValidPointer(sf, 2); 2991 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2992 if (nroots < 0) { 2993 PetscSection section, gSection; 2994 2995 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2996 if (section) { 2997 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2998 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2999 } else { 3000 *sf = NULL; 3001 PetscFunctionReturn(0); 3002 } 3003 } 3004 *sf = dm->defaultSF; 3005 PetscFunctionReturn(0); 3006 } 3007 3008 #undef __FUNCT__ 3009 #define __FUNCT__ "DMSetDefaultSF" 3010 /*@ 3011 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3012 3013 Input Parameters: 3014 + dm - The DM 3015 - sf - The PetscSF 3016 3017 Level: intermediate 3018 3019 Note: Any previous SF is destroyed 3020 3021 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3022 @*/ 3023 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 3024 { 3025 PetscErrorCode ierr; 3026 3027 PetscFunctionBegin; 3028 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3029 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3030 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3031 dm->defaultSF = sf; 3032 PetscFunctionReturn(0); 3033 } 3034 3035 #undef __FUNCT__ 3036 #define __FUNCT__ "DMCreateDefaultSF" 3037 /*@C 3038 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3039 describing the data layout. 3040 3041 Input Parameters: 3042 + dm - The DM 3043 . 
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm, &comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned (root) dofs so leaf offsets can be mapped to ranks */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a remotely owned point as -(gdof+1). */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local leaf indices and the (rank, index) of each remote root */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record unconstrained local dofs, skipping constrained indices in cind */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned: recover the true global offset from the -(goff+1) encoding */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset, size, ranges, &r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insertion point)-1 when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local,
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3122 PetscFunctionReturn(0); 3123 } 3124 3125 #undef __FUNCT__ 3126 #define __FUNCT__ "DMGetPointSF" 3127 /*@ 3128 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3129 3130 Input Parameter: 3131 . dm - The DM 3132 3133 Output Parameter: 3134 . sf - The PetscSF 3135 3136 Level: intermediate 3137 3138 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3139 3140 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3141 @*/ 3142 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3143 { 3144 PetscFunctionBegin; 3145 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3146 PetscValidPointer(sf, 2); 3147 *sf = dm->sf; 3148 PetscFunctionReturn(0); 3149 } 3150 3151 #undef __FUNCT__ 3152 #define __FUNCT__ "DMSetPointSF" 3153 /*@ 3154 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3155 3156 Input Parameters: 3157 + dm - The DM 3158 - sf - The PetscSF 3159 3160 Level: intermediate 3161 3162 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3163 @*/ 3164 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3165 { 3166 PetscErrorCode ierr; 3167 3168 PetscFunctionBegin; 3169 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3170 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3171 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3172 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3173 dm->sf = sf; 3174 PetscFunctionReturn(0); 3175 } 3176 3177 #undef __FUNCT__ 3178 #define __FUNCT__ "DMGetNumFields" 3179 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3180 { 3181 PetscFunctionBegin; 3182 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3183 PetscValidPointer(numFields, 2); 3184 *numFields = dm->numFields; 3185 PetscFunctionReturn(0); 3186 } 3187 3188 #undef __FUNCT__ 3189 #define __FUNCT__ "DMSetNumFields" 3190 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3191 { 3192 PetscInt f; 3193 PetscErrorCode ierr; 3194 3195 PetscFunctionBegin; 3196 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3197 for (f = 0; f < dm->numFields; ++f) { 3198 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3199 } 3200 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3201 dm->numFields = numFields; 3202 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3203 for (f = 0; f < dm->numFields; ++f) { 3204 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3205 } 3206 PetscFunctionReturn(0); 3207 } 3208 3209 #undef __FUNCT__ 3210 #define __FUNCT__ "DMGetField" 3211 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3212 { 3213 PetscFunctionBegin; 3214 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3215 PetscValidPointer(field, 2); 3216 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3217 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3218 *field = dm->fields[f]; 3219 PetscFunctionReturn(0); 3220 } 3221 3222 #undef __FUNCT__ 3223 #define __FUNCT__ "DMRestrictHook_Coordinates" 3224 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3225 { 3226 DM dm_coord,dmc_coord; 3227 PetscErrorCode ierr; 3228 Vec coords,ccoords; 3229 VecScatter scat; 3230 PetscFunctionBegin; 3231 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3232 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3233 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3234 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3235 if (coords && !ccoords) { 3236 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3237 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3238 ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3239 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3240 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3241 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3242 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3243 } 3244 PetscFunctionReturn(0); 3245 } 3246 3247 #undef __FUNCT__ 3248 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3249 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3250 { 3251 DM dm_coord,subdm_coord; 3252 PetscErrorCode ierr; 3253 Vec coords,ccoords,clcoords; 3254 VecScatter *scat_i,*scat_g; 3255 PetscFunctionBegin; 3256 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3257 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3258 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3259 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3260 if (coords && !ccoords) { 3261 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3262 ierr = 
DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3263 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3264 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3265 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3266 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3267 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3268 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3269 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3270 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3271 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3272 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3273 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3274 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3275 ierr = PetscFree(scat_g);CHKERRQ(ierr); 3276 } 3277 PetscFunctionReturn(0); 3278 } 3279 3280 #undef __FUNCT__ 3281 #define __FUNCT__ "DMSetCoordinates" 3282 /*@ 3283 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3284 3285 Collective on DM 3286 3287 Input Parameters: 3288 + dm - the DM 3289 - c - coordinate vector 3290 3291 Note: 3292 The coordinates do include those for ghost points, which are in the local vector 3293 3294 Level: intermediate 3295 3296 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3297 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3298 @*/ 3299 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3300 { 3301 PetscErrorCode ierr; 3302 3303 PetscFunctionBegin; 3304 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3305 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3306 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3307 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3308 dm->coordinates = c; 3309 ierr = 
VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3310 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3311 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3312 PetscFunctionReturn(0); 3313 } 3314 3315 #undef __FUNCT__ 3316 #define __FUNCT__ "DMSetCoordinatesLocal" 3317 /*@ 3318 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3319 3320 Collective on DM 3321 3322 Input Parameters: 3323 + dm - the DM 3324 - c - coordinate vector 3325 3326 Note: 3327 The coordinates of ghost points can be set using DMSetCoordinates() 3328 followed by DMGetCoordinatesLocal(). This is intended to enable the 3329 setting of ghost coordinates outside of the domain. 3330 3331 Level: intermediate 3332 3333 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3334 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3335 @*/ 3336 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3337 { 3338 PetscErrorCode ierr; 3339 3340 PetscFunctionBegin; 3341 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3342 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3343 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3344 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3345 3346 dm->coordinatesLocal = c; 3347 3348 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3349 PetscFunctionReturn(0); 3350 } 3351 3352 #undef __FUNCT__ 3353 #define __FUNCT__ "DMGetCoordinates" 3354 /*@ 3355 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3356 3357 Not Collective 3358 3359 Input Parameter: 3360 . dm - the DM 3361 3362 Output Parameter: 3363 . c - global coordinate vector 3364 3365 Note: 3366 This is a borrowed reference, so the user should NOT destroy this vector 3367 3368 Each process has only the local coordinates (does NOT have the ghost coordinates). 
3369 3370 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3371 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3372 3373 Level: intermediate 3374 3375 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3376 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3377 @*/ 3378 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3379 { 3380 PetscErrorCode ierr; 3381 3382 PetscFunctionBegin; 3383 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3384 PetscValidPointer(c,2); 3385 if (!dm->coordinates && dm->coordinatesLocal) { 3386 DM cdm = NULL; 3387 3388 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3389 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3390 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3391 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3392 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3393 } 3394 *c = dm->coordinates; 3395 PetscFunctionReturn(0); 3396 } 3397 3398 #undef __FUNCT__ 3399 #define __FUNCT__ "DMGetCoordinatesLocal" 3400 /*@ 3401 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3402 3403 Collective on DM 3404 3405 Input Parameter: 3406 . dm - the DM 3407 3408 Output Parameter: 3409 . c - coordinate vector 3410 3411 Note: 3412 This is a borrowed reference, so the user should NOT destroy this vector 3413 3414 Each process has the local and ghost coordinates 3415 3416 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3417 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3418 3419 Level: intermediate 3420 3421 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3422 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3423 @*/ 3424 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3425 { 3426 PetscErrorCode ierr; 3427 3428 PetscFunctionBegin; 3429 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3430 PetscValidPointer(c,2); 3431 if (!dm->coordinatesLocal && dm->coordinates) { 3432 DM cdm = NULL; 3433 3434 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3435 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3436 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3437 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3438 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3439 } 3440 *c = dm->coordinatesLocal; 3441 PetscFunctionReturn(0); 3442 } 3443 3444 #undef __FUNCT__ 3445 #define __FUNCT__ "DMGetCoordinateDM" 3446 /*@ 3447 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3448 3449 Collective on DM 3450 3451 Input Parameter: 3452 . dm - the DM 3453 3454 Output Parameter: 3455 . 
cdm - coordinate DM 3456 3457 Level: intermediate 3458 3459 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3460 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3461 @*/ 3462 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3463 { 3464 PetscErrorCode ierr; 3465 3466 PetscFunctionBegin; 3467 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3468 PetscValidPointer(cdm,2); 3469 if (!dm->coordinateDM) { 3470 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3471 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3472 } 3473 *cdm = dm->coordinateDM; 3474 PetscFunctionReturn(0); 3475 } 3476 3477 #undef __FUNCT__ 3478 #define __FUNCT__ "DMLocatePoints" 3479 /*@ 3480 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3481 3482 Not collective 3483 3484 Input Parameters: 3485 + dm - The DM 3486 - v - The Vec of points 3487 3488 Output Parameter: 3489 . cells - The local cell numbers for cells which contain the points 3490 3491 Level: developer 3492 3493 .keywords: point location, mesh 3494 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3495 @*/ 3496 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3497 { 3498 PetscErrorCode ierr; 3499 3500 PetscFunctionBegin; 3501 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3502 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3503 PetscValidPointer(cells,3); 3504 if (dm->ops->locatepoints) { 3505 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3506 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3507 PetscFunctionReturn(0); 3508 } 3509