1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed. 11 12 Collective on Vec 13 14 Input Parameters: 15 + dm - the DM 16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm' 17 - optionname - option to activate viewing 18 19 Level: intermediate 20 21 .keywords: DM, view, options, database 22 .seealso: VecViewFromOptions(), MatViewFromOptions() 23 */ 24 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 25 { 26 PetscErrorCode ierr; 27 PetscBool flg; 28 PetscViewer viewer; 29 PetscViewerFormat format; 30 31 PetscFunctionBegin; 32 if (prefix) { 33 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 34 } else { 35 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 36 } 37 if (flg) { 38 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 39 ierr = DMView(dm,viewer);CHKERRQ(ierr); 40 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 41 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 42 } 43 PetscFunctionReturn(0); 44 } 45 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMCreate" 49 /*@ 50 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 51 52 If you never call DMSetType() it will generate an 53 error when you try to use the vector. 54 55 Collective on MPI_Comm 56 57 Input Parameter: 58 . comm - The communicator for the DM object 59 60 Output Parameter: 61 . 
dm - The DM object 62 63 Level: beginner 64 65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 66 @*/ 67 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 68 { 69 DM v; 70 PetscErrorCode ierr; 71 72 PetscFunctionBegin; 73 PetscValidPointer(dm,2); 74 *dm = NULL; 75 ierr = VecInitializePackage();CHKERRQ(ierr); 76 ierr = MatInitializePackage();CHKERRQ(ierr); 77 ierr = DMInitializePackage();CHKERRQ(ierr); 78 79 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 80 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 81 82 83 v->ltogmap = NULL; 84 v->ltogmapb = NULL; 85 v->bs = 1; 86 v->coloringtype = IS_COLORING_GLOBAL; 87 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 88 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 89 v->defaultSection = NULL; 90 v->defaultGlobalSection = NULL; 91 { 92 PetscInt i; 93 for (i = 0; i < 10; ++i) { 94 v->nullspaceConstructors[i] = NULL; 95 } 96 } 97 v->numFields = 0; 98 v->fields = NULL; 99 100 *dm = v; 101 PetscFunctionReturn(0); 102 } 103 104 #undef __FUNCT__ 105 #define __FUNCT__ "DMClone" 106 /*@ 107 DMClone - Creates a DM object with the same topology as the original. 108 109 Collective on MPI_Comm 110 111 Input Parameter: 112 . dm - The original DM object 113 114 Output Parameter: 115 . 
newdm - The new DM object 116 117 Level: beginner 118 119 .keywords: DM, topology, create 120 @*/ 121 PetscErrorCode DMClone(DM dm, DM *newdm) 122 { 123 PetscSF sf; 124 Vec coords; 125 void *ctx; 126 PetscErrorCode ierr; 127 128 PetscFunctionBegin; 129 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 130 PetscValidPointer(newdm,2); 131 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 132 if (dm->ops->clone) { 133 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 134 } 135 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 136 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 137 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 138 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 139 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 140 if (coords) { 141 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 142 } else { 143 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 144 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 145 } 146 PetscFunctionReturn(0); 147 } 148 149 #undef __FUNCT__ 150 #define __FUNCT__ "DMSetVecType" 151 /*@C 152 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 153 154 Logically Collective on DMDA 155 156 Input Parameter: 157 + da - initial distributed array 158 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 159 160 Options Database: 161 . 
-dm_vec_type ctype 162 163 Level: intermediate 164 165 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 166 @*/ 167 PetscErrorCode DMSetVecType(DM da,VecType ctype) 168 { 169 PetscErrorCode ierr; 170 171 PetscFunctionBegin; 172 PetscValidHeaderSpecific(da,DM_CLASSID,1); 173 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 174 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 175 PetscFunctionReturn(0); 176 } 177 178 #undef __FUNCT__ 179 #define __FUNCT__ "VecGetDM" 180 /*@ 181 VecGetDM - Gets the DM defining the data layout of the vector 182 183 Not collective 184 185 Input Parameter: 186 . v - The Vec 187 188 Output Parameter: 189 . dm - The DM 190 191 Level: intermediate 192 193 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 194 @*/ 195 PetscErrorCode VecGetDM(Vec v, DM *dm) 196 { 197 PetscErrorCode ierr; 198 199 PetscFunctionBegin; 200 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 201 PetscValidPointer(dm,2); 202 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 203 PetscFunctionReturn(0); 204 } 205 206 #undef __FUNCT__ 207 #define __FUNCT__ "VecSetDM" 208 /*@ 209 VecSetDM - Sets the DM defining the data layout of the vector 210 211 Not collective 212 213 Input Parameters: 214 + v - The Vec 215 - dm - The DM 216 217 Level: intermediate 218 219 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 220 @*/ 221 PetscErrorCode VecSetDM(Vec v, DM dm) 222 { 223 PetscErrorCode ierr; 224 225 PetscFunctionBegin; 226 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 227 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 228 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 229 PetscFunctionReturn(0); 230 } 231 232 #undef __FUNCT__ 233 #define __FUNCT__ "DMSetMatType" 234 /*@C 235 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 236 237 Logically Collective on 
DM 238 239 Input Parameter: 240 + dm - the DM context 241 . ctype - the matrix type 242 243 Options Database: 244 . -dm_mat_type ctype 245 246 Level: intermediate 247 248 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 249 @*/ 250 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 251 { 252 PetscErrorCode ierr; 253 254 PetscFunctionBegin; 255 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 256 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 257 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 258 PetscFunctionReturn(0); 259 } 260 261 #undef __FUNCT__ 262 #define __FUNCT__ "MatGetDM" 263 /*@ 264 MatGetDM - Gets the DM defining the data layout of the matrix 265 266 Not collective 267 268 Input Parameter: 269 . A - The Mat 270 271 Output Parameter: 272 . dm - The DM 273 274 Level: intermediate 275 276 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 277 @*/ 278 PetscErrorCode MatGetDM(Mat A, DM *dm) 279 { 280 PetscErrorCode ierr; 281 282 PetscFunctionBegin; 283 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 284 PetscValidPointer(dm,2); 285 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 286 PetscFunctionReturn(0); 287 } 288 289 #undef __FUNCT__ 290 #define __FUNCT__ "MatSetDM" 291 /*@ 292 MatSetDM - Sets the DM defining the data layout of the matrix 293 294 Not collective 295 296 Input Parameters: 297 + A - The Mat 298 - dm - The DM 299 300 Level: intermediate 301 302 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 303 @*/ 304 PetscErrorCode MatSetDM(Mat A, DM dm) 305 { 306 PetscErrorCode ierr; 307 308 PetscFunctionBegin; 309 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 310 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 311 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 312 PetscFunctionReturn(0); 313 } 314 315 #undef __FUNCT__ 316 #define __FUNCT__ "DMSetOptionsPrefix" 317 /*@C 318 DMSetOptionsPrefix - 
Sets the prefix used for searching for all 319 DMDA options in the database. 320 321 Logically Collective on DMDA 322 323 Input Parameter: 324 + da - the DMDA context 325 - prefix - the prefix to prepend to all option names 326 327 Notes: 328 A hyphen (-) must NOT be given at the beginning of the prefix name. 329 The first character of all runtime options is AUTOMATICALLY the hyphen. 330 331 Level: advanced 332 333 .keywords: DMDA, set, options, prefix, database 334 335 .seealso: DMSetFromOptions() 336 @*/ 337 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 338 { 339 PetscErrorCode ierr; 340 341 PetscFunctionBegin; 342 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 343 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 344 PetscFunctionReturn(0); 345 } 346 347 #undef __FUNCT__ 348 #define __FUNCT__ "DMDestroy" 349 /*@ 350 DMDestroy - Destroys a vector packer or DMDA. 351 352 Collective on DM 353 354 Input Parameter: 355 . dm - the DM object to destroy 356 357 Level: developer 358 359 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 360 361 @*/ 362 PetscErrorCode DMDestroy(DM *dm) 363 { 364 PetscInt i, cnt = 0, f; 365 DMNamedVecLink nlink,nnext; 366 PetscErrorCode ierr; 367 368 PetscFunctionBegin; 369 if (!*dm) PetscFunctionReturn(0); 370 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 371 372 /* count all the circular references of DM and its contained Vecs */ 373 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 374 if ((*dm)->localin[i]) cnt++; 375 if ((*dm)->globalin[i]) cnt++; 376 } 377 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 378 if ((*dm)->x) { 379 DM obj; 380 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 381 if (obj == *dm) cnt++; 382 } 383 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 384 if ((*dm)->x) { 385 DM obj; 386 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 387 if (obj == *dm) cnt++; 388 } 389 390 if (--((PetscObject)(*dm))->refct - 
cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 391 /* 392 Need this test because the dm references the vectors that 393 reference the dm, so destroying the dm calls destroy on the 394 vectors that cause another destroy on the dm 395 */ 396 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 397 ((PetscObject) (*dm))->refct = 0; 398 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 399 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 400 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 401 } 402 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 403 nnext = nlink->next; 404 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 405 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 406 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 407 ierr = PetscFree(nlink);CHKERRQ(ierr); 408 } 409 (*dm)->namedglobal = NULL; 410 411 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 412 nnext = nlink->next; 413 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 414 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 415 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 416 ierr = PetscFree(nlink);CHKERRQ(ierr); 417 } 418 (*dm)->namedlocal = NULL; 419 420 /* Destroy the list of hooks */ 421 { 422 DMCoarsenHookLink link,next; 423 for (link=(*dm)->coarsenhook; link; link=next) { 424 next = link->next; 425 ierr = PetscFree(link);CHKERRQ(ierr); 426 } 427 (*dm)->coarsenhook = NULL; 428 } 429 { 430 DMRefineHookLink link,next; 431 for (link=(*dm)->refinehook; link; link=next) { 432 next = link->next; 433 ierr = PetscFree(link);CHKERRQ(ierr); 434 } 435 (*dm)->refinehook = NULL; 436 } 437 { 438 DMSubDomainHookLink link,next; 439 for 
(link=(*dm)->subdomainhook; link; link=next) { 440 next = link->next; 441 ierr = PetscFree(link);CHKERRQ(ierr); 442 } 443 (*dm)->subdomainhook = NULL; 444 } 445 { 446 DMGlobalToLocalHookLink link,next; 447 for (link=(*dm)->gtolhook; link; link=next) { 448 next = link->next; 449 ierr = PetscFree(link);CHKERRQ(ierr); 450 } 451 (*dm)->gtolhook = NULL; 452 } 453 /* Destroy the work arrays */ 454 { 455 DMWorkLink link,next; 456 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 457 for (link=(*dm)->workin; link; link=next) { 458 next = link->next; 459 ierr = PetscFree(link->mem);CHKERRQ(ierr); 460 ierr = PetscFree(link);CHKERRQ(ierr); 461 } 462 (*dm)->workin = NULL; 463 } 464 465 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 466 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 467 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 468 469 if ((*dm)->ctx && (*dm)->ctxdestroy) { 470 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 471 } 472 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 473 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 474 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 475 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 476 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 477 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 478 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 479 480 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 481 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 482 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 483 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 484 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 485 486 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 487 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 488 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 489 490 for (f = 0; f < (*dm)->numFields; ++f) { 491 ierr = 
PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 492 } 493 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 494 /* if memory was published with AMS then destroy it */ 495 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 496 497 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 498 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 499 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 500 PetscFunctionReturn(0); 501 } 502 503 #undef __FUNCT__ 504 #define __FUNCT__ "DMSetUp" 505 /*@ 506 DMSetUp - sets up the data structures inside a DM object 507 508 Collective on DM 509 510 Input Parameter: 511 . dm - the DM object to setup 512 513 Level: developer 514 515 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 516 517 @*/ 518 PetscErrorCode DMSetUp(DM dm) 519 { 520 PetscErrorCode ierr; 521 522 PetscFunctionBegin; 523 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 524 if (dm->setupcalled) PetscFunctionReturn(0); 525 if (dm->ops->setup) { 526 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 527 } 528 dm->setupcalled = PETSC_TRUE; 529 PetscFunctionReturn(0); 530 } 531 532 #undef __FUNCT__ 533 #define __FUNCT__ "DMSetFromOptions" 534 /*@ 535 DMSetFromOptions - sets parameters in a DM from the options database 536 537 Collective on DM 538 539 Input Parameter: 540 . dm - the DM object to set options for 541 542 Options Database: 543 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 544 . -dm_vec_type <type> type of vector to create inside DM 545 . 
-dm_mat_type <type> type of matrix to create inside DM 546 - -dm_coloring_type <global or ghosted> 547 548 Level: developer 549 550 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 551 552 @*/ 553 PetscErrorCode DMSetFromOptions(DM dm) 554 { 555 char typeName[256] = MATAIJ; 556 PetscBool flg; 557 PetscErrorCode ierr; 558 559 PetscFunctionBegin; 560 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 561 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 562 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 563 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 564 if (flg) { 565 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 566 } 567 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 568 if (flg) { 569 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 570 } 571 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 572 if (dm->ops->setfromoptions) { 573 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 574 } 575 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 576 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 577 ierr = PetscOptionsEnd();CHKERRQ(ierr); 578 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 579 PetscFunctionReturn(0); 580 } 581 582 #undef __FUNCT__ 583 #define __FUNCT__ "DMView" 584 /*@C 585 DMView - Views a vector packer or DMDA. 
586 587 Collective on DM 588 589 Input Parameter: 590 + dm - the DM object to view 591 - v - the viewer 592 593 Level: developer 594 595 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 596 597 @*/ 598 PetscErrorCode DMView(DM dm,PetscViewer v) 599 { 600 PetscErrorCode ierr; 601 PetscBool isbinary; 602 603 PetscFunctionBegin; 604 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 605 if (!v) { 606 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 607 } 608 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 609 if (isbinary) { 610 PetscInt classid = DM_FILE_CLASSID; 611 char type[256]; 612 613 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 614 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 615 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 616 } 617 if (dm->ops->view) { 618 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 619 } 620 PetscFunctionReturn(0); 621 } 622 623 #undef __FUNCT__ 624 #define __FUNCT__ "DMCreateGlobalVector" 625 /*@ 626 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 627 628 Collective on DM 629 630 Input Parameter: 631 . dm - the DM object 632 633 Output Parameter: 634 . vec - the global vector 635 636 Level: beginner 637 638 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 639 640 @*/ 641 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 642 { 643 PetscErrorCode ierr; 644 645 PetscFunctionBegin; 646 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 647 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 648 PetscFunctionReturn(0); 649 } 650 651 #undef __FUNCT__ 652 #define __FUNCT__ "DMCreateLocalVector" 653 /*@ 654 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 655 656 Not Collective 657 658 Input Parameter: 659 . 
dm - the DM object 660 661 Output Parameter: 662 . vec - the local vector 663 664 Level: beginner 665 666 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 667 668 @*/ 669 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 670 { 671 PetscErrorCode ierr; 672 673 PetscFunctionBegin; 674 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 675 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 676 PetscFunctionReturn(0); 677 } 678 679 #undef __FUNCT__ 680 #define __FUNCT__ "DMGetLocalToGlobalMapping" 681 /*@ 682 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 683 684 Collective on DM 685 686 Input Parameter: 687 . dm - the DM that provides the mapping 688 689 Output Parameter: 690 . ltog - the mapping 691 692 Level: intermediate 693 694 Notes: 695 This mapping can then be used by VecSetLocalToGlobalMapping() or 696 MatSetLocalToGlobalMapping(). 697 698 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 699 @*/ 700 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 701 { 702 PetscErrorCode ierr; 703 704 PetscFunctionBegin; 705 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 706 PetscValidPointer(ltog,2); 707 if (!dm->ltogmap) { 708 PetscSection section, sectionGlobal; 709 710 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 711 if (section) { 712 PetscInt *ltog; 713 PetscInt pStart, pEnd, size, p, l; 714 715 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 716 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 717 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 718 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 719 for (p = pStart, l = 0; p < pEnd; ++p) { 720 PetscInt dof, off, c; 721 722 /* Should probably use constrained dofs */ 723 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 724 ierr = PetscSectionGetOffset(sectionGlobal, p, 
&off);CHKERRQ(ierr); 725 for (c = 0; c < dof; ++c, ++l) { 726 ltog[l] = off+c; 727 } 728 } 729 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 730 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 731 } else { 732 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 733 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 734 } 735 } 736 *ltog = dm->ltogmap; 737 PetscFunctionReturn(0); 738 } 739 740 #undef __FUNCT__ 741 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 742 /*@ 743 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 744 745 Collective on DM 746 747 Input Parameter: 748 . da - the distributed array that provides the mapping 749 750 Output Parameter: 751 . ltog - the block mapping 752 753 Level: intermediate 754 755 Notes: 756 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 757 MatSetLocalToGlobalMappingBlock(). 
758 759 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 760 @*/ 761 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 762 { 763 PetscErrorCode ierr; 764 765 PetscFunctionBegin; 766 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 767 PetscValidPointer(ltog,2); 768 if (!dm->ltogmapb) { 769 PetscInt bs; 770 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 771 if (bs > 1) { 772 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 773 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 774 } else { 775 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 776 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 777 } 778 } 779 *ltog = dm->ltogmapb; 780 PetscFunctionReturn(0); 781 } 782 783 #undef __FUNCT__ 784 #define __FUNCT__ "DMGetBlockSize" 785 /*@ 786 DMGetBlockSize - Gets the inherent block size associated with a DM 787 788 Not Collective 789 790 Input Parameter: 791 . dm - the DM with block structure 792 793 Output Parameter: 794 . 
bs - the block size, 1 implies no exploitable block structure 795 796 Level: intermediate 797 798 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 799 @*/ 800 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 801 { 802 PetscFunctionBegin; 803 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 804 PetscValidPointer(bs,2); 805 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 806 *bs = dm->bs; 807 PetscFunctionReturn(0); 808 } 809 810 #undef __FUNCT__ 811 #define __FUNCT__ "DMCreateInterpolation" 812 /*@ 813 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 814 815 Collective on DM 816 817 Input Parameter: 818 + dm1 - the DM object 819 - dm2 - the second, finer DM object 820 821 Output Parameter: 822 + mat - the interpolation 823 - vec - the scaling (optional) 824 825 Level: developer 826 827 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 828 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 829 830 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 831 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
832 833 834 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 835 836 @*/ 837 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 838 { 839 PetscErrorCode ierr; 840 841 PetscFunctionBegin; 842 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 843 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 844 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 845 PetscFunctionReturn(0); 846 } 847 848 #undef __FUNCT__ 849 #define __FUNCT__ "DMCreateInjection" 850 /*@ 851 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 852 853 Collective on DM 854 855 Input Parameter: 856 + dm1 - the DM object 857 - dm2 - the second, finer DM object 858 859 Output Parameter: 860 . ctx - the injection 861 862 Level: developer 863 864 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 865 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 866 867 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 868 869 @*/ 870 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 871 { 872 PetscErrorCode ierr; 873 874 PetscFunctionBegin; 875 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 876 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 877 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 878 PetscFunctionReturn(0); 879 } 880 881 #undef __FUNCT__ 882 #define __FUNCT__ "DMCreateColoring" 883 /*@C 884 DMCreateColoring - Gets coloring for a DMDA or DMComposite 885 886 Collective on DM 887 888 Input Parameter: 889 + dm - the DM object 890 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 891 - matype - either MATAIJ or MATBAIJ 892 893 Output Parameter: 894 . 
coloring - the coloring 895 896 Level: developer 897 898 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 899 900 @*/ 901 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 902 { 903 PetscErrorCode ierr; 904 905 PetscFunctionBegin; 906 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 907 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 908 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 909 PetscFunctionReturn(0); 910 } 911 912 #undef __FUNCT__ 913 #define __FUNCT__ "DMCreateMatrix" 914 /*@C 915 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 916 917 Collective on DM 918 919 Input Parameter: 920 + dm - the DM object 921 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 922 any type which inherits from one of these (such as MATAIJ) 923 924 Output Parameter: 925 . mat - the empty Jacobian 926 927 Level: beginner 928 929 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 930 do not need to do it yourself. 931 932 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 933 the nonzero pattern call DMDASetMatPreallocateOnly() 934 935 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 936 internally by PETSc. 937 938 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 939 the indices for the global numbering for DMDAs which is complicated. 
940 941 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 942 943 @*/ 944 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 945 { 946 PetscErrorCode ierr; 947 948 PetscFunctionBegin; 949 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 950 ierr = MatInitializePackage();CHKERRQ(ierr); 951 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 952 PetscValidPointer(mat,3); 953 if (dm->mattype) { 954 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 955 } else { 956 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 957 } 958 PetscFunctionReturn(0); 959 } 960 961 #undef __FUNCT__ 962 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 963 /*@ 964 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 965 preallocated but the nonzero structure and zero values will not be set. 966 967 Logically Collective on DMDA 968 969 Input Parameter: 970 + dm - the DM 971 - only - PETSC_TRUE if only want preallocation 972 973 Level: developer 974 .seealso DMCreateMatrix() 975 @*/ 976 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 977 { 978 PetscFunctionBegin; 979 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 980 dm->prealloc_only = only; 981 PetscFunctionReturn(0); 982 } 983 984 #undef __FUNCT__ 985 #define __FUNCT__ "DMGetWorkArray" 986 /*@C 987 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 988 989 Not Collective 990 991 Input Parameters: 992 + dm - the DM object 993 . count - The minium size 994 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 995 996 Output Parameter: 997 . 
array - the work array 998 999 Level: developer 1000 1001 .seealso DMDestroy(), DMCreate() 1002 @*/ 1003 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1004 { 1005 PetscErrorCode ierr; 1006 DMWorkLink link; 1007 size_t size; 1008 1009 PetscFunctionBegin; 1010 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1011 PetscValidPointer(mem,4); 1012 if (dm->workin) { 1013 link = dm->workin; 1014 dm->workin = dm->workin->next; 1015 } else { 1016 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 1017 } 1018 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1019 if (size*count > link->bytes) { 1020 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1021 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1022 link->bytes = size*count; 1023 } 1024 link->next = dm->workout; 1025 dm->workout = link; 1026 *(void**)mem = link->mem; 1027 PetscFunctionReturn(0); 1028 } 1029 1030 #undef __FUNCT__ 1031 #define __FUNCT__ "DMRestoreWorkArray" 1032 /*@C 1033 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1034 1035 Not Collective 1036 1037 Input Parameters: 1038 + dm - the DM object 1039 . count - The minium size 1040 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1041 1042 Output Parameter: 1043 . 
array - the work array 1044 1045 Level: developer 1046 1047 .seealso DMDestroy(), DMCreate() 1048 @*/ 1049 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1050 { 1051 DMWorkLink *p,link; 1052 1053 PetscFunctionBegin; 1054 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1055 PetscValidPointer(mem,4); 1056 for (p=&dm->workout; (link=*p); p=&link->next) { 1057 if (link->mem == *(void**)mem) { 1058 *p = link->next; 1059 link->next = dm->workin; 1060 dm->workin = link; 1061 *(void**)mem = NULL; 1062 PetscFunctionReturn(0); 1063 } 1064 } 1065 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1066 PetscFunctionReturn(0); 1067 } 1068 1069 #undef __FUNCT__ 1070 #define __FUNCT__ "DMSetNullSpaceConstructor" 1071 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1072 { 1073 PetscFunctionBegin; 1074 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1075 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1076 dm->nullspaceConstructors[field] = nullsp; 1077 PetscFunctionReturn(0); 1078 } 1079 1080 #undef __FUNCT__ 1081 #define __FUNCT__ "DMCreateFieldIS" 1082 /*@C 1083 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1084 1085 Not collective 1086 1087 Input Parameter: 1088 . dm - the DM object 1089 1090 Output Parameters: 1091 + numFields - The number of fields (or NULL if not requested) 1092 . fieldNames - The name for each field (or NULL if not requested) 1093 - fields - The global indices for each field (or NULL if not requested) 1094 1095 Level: intermediate 1096 1097 Notes: 1098 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1099 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1100 PetscFree(). 1101 1102 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1103 @*/ 1104 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1105 { 1106 PetscSection section, sectionGlobal; 1107 PetscErrorCode ierr; 1108 1109 PetscFunctionBegin; 1110 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1111 if (numFields) { 1112 PetscValidPointer(numFields,2); 1113 *numFields = 0; 1114 } 1115 if (fieldNames) { 1116 PetscValidPointer(fieldNames,3); 1117 *fieldNames = NULL; 1118 } 1119 if (fields) { 1120 PetscValidPointer(fields,4); 1121 *fields = NULL; 1122 } 1123 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1124 if (section) { 1125 PetscInt *fieldSizes, **fieldIndices; 1126 PetscInt nF, f, pStart, pEnd, p; 1127 1128 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1129 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1130 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1131 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1132 for (f = 0; f < nF; ++f) { 1133 fieldSizes[f] = 0; 1134 } 1135 for (p = pStart; p < pEnd; ++p) { 1136 PetscInt gdof; 1137 1138 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1139 if (gdof > 0) { 1140 for (f = 0; f < nF; ++f) { 1141 PetscInt fdof, fcdof; 1142 1143 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1144 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1145 fieldSizes[f] += fdof-fcdof; 1146 } 1147 } 1148 } 1149 for (f = 0; f < nF; ++f) { 1150 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1151 fieldSizes[f] = 0; 1152 } 1153 for (p = pStart; p < pEnd; ++p) { 1154 PetscInt gdof, goff; 
1155 1156 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1157 if (gdof > 0) { 1158 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1159 for (f = 0; f < nF; ++f) { 1160 PetscInt fdof, fcdof, fc; 1161 1162 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1163 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1164 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1165 fieldIndices[f][fieldSizes[f]] = goff++; 1166 } 1167 } 1168 } 1169 } 1170 if (numFields) *numFields = nF; 1171 if (fieldNames) { 1172 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1173 for (f = 0; f < nF; ++f) { 1174 const char *fieldName; 1175 1176 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1177 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1178 } 1179 } 1180 if (fields) { 1181 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1182 for (f = 0; f < nF; ++f) { 1183 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1184 } 1185 } 1186 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1187 } else if (dm->ops->createfieldis) { 1188 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1189 } 1190 PetscFunctionReturn(0); 1191 } 1192 1193 1194 #undef __FUNCT__ 1195 #define __FUNCT__ "DMCreateFieldDecomposition" 1196 /*@C 1197 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1198 corresponding to different fields: each IS contains the global indices of the dofs of the 1199 corresponding field. The optional list of DMs define the DM for each subproblem. 1200 Generalizes DMCreateFieldIS(). 1201 1202 Not collective 1203 1204 Input Parameter: 1205 . 
dm - the DM object 1206 1207 Output Parameters: 1208 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1209 . namelist - The name for each field (or NULL if not requested) 1210 . islist - The global indices for each field (or NULL if not requested) 1211 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1212 1213 Level: intermediate 1214 1215 Notes: 1216 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1217 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1218 and all of the arrays should be freed with PetscFree(). 1219 1220 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1221 @*/ 1222 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1223 { 1224 PetscErrorCode ierr; 1225 1226 PetscFunctionBegin; 1227 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1228 if (len) { 1229 PetscValidPointer(len,2); 1230 *len = 0; 1231 } 1232 if (namelist) { 1233 PetscValidPointer(namelist,3); 1234 *namelist = 0; 1235 } 1236 if (islist) { 1237 PetscValidPointer(islist,4); 1238 *islist = 0; 1239 } 1240 if (dmlist) { 1241 PetscValidPointer(dmlist,5); 1242 *dmlist = 0; 1243 } 1244 /* 1245 Is it a good idea to apply the following check across all impls? 1246 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1247 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1248 */ 1249 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1250 if (!dm->ops->createfielddecomposition) { 1251 PetscSection section; 1252 PetscInt numFields, f; 1253 1254 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1255 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1256 if (section && numFields && dm->ops->createsubdm) { 1257 *len = numFields; 1258 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1259 for (f = 0; f < numFields; ++f) { 1260 const char *fieldName; 1261 1262 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1263 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1264 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1265 } 1266 } else { 1267 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1268 /* By default there are no DMs associated with subproblems. */ 1269 if (dmlist) *dmlist = NULL; 1270 } 1271 } else { 1272 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1273 } 1274 PetscFunctionReturn(0); 1275 } 1276 1277 #undef __FUNCT__ 1278 #define __FUNCT__ "DMCreateSubDM" 1279 /*@C 1280 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1281 The fields are defined by DMCreateFieldIS(). 1282 1283 Not collective 1284 1285 Input Parameters: 1286 + dm - the DM object 1287 . numFields - number of fields in this subproblem 1288 - len - The number of subproblems in the decomposition (or NULL if not requested) 1289 1290 Output Parameters: 1291 . 
is - The global indices for the subproblem 1292 - dm - The DM for the subproblem 1293 1294 Level: intermediate 1295 1296 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1297 @*/ 1298 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1299 { 1300 PetscErrorCode ierr; 1301 1302 PetscFunctionBegin; 1303 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1304 PetscValidPointer(fields,3); 1305 if (is) PetscValidPointer(is,4); 1306 if (subdm) PetscValidPointer(subdm,5); 1307 if (dm->ops->createsubdm) { 1308 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1309 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1310 PetscFunctionReturn(0); 1311 } 1312 1313 1314 #undef __FUNCT__ 1315 #define __FUNCT__ "DMCreateDomainDecomposition" 1316 /*@C 1317 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1318 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1319 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1320 define a nonoverlapping covering, while outer subdomains can overlap. 1321 The optional list of DMs define the DM for each subproblem. 1322 1323 Not collective 1324 1325 Input Parameter: 1326 . dm - the DM object 1327 1328 Output Parameters: 1329 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1330 . namelist - The name for each subdomain (or NULL if not requested) 1331 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1332 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1333 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1334 1335 Level: intermediate 1336 1337 Notes: 1338 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1339 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1340 and all of the arrays should be freed with PetscFree(). 1341 1342 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1343 @*/ 1344 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1345 { 1346 PetscErrorCode ierr; 1347 DMSubDomainHookLink link; 1348 PetscInt i,l; 1349 1350 PetscFunctionBegin; 1351 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1352 if (len) {PetscValidPointer(len,2); *len = 0;} 1353 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1354 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1355 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1356 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1357 /* 1358 Is it a good idea to apply the following check across all impls? 1359 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1360 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1361 */ 1362 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1363 if (dm->ops->createdomaindecomposition) { 1364 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1365 /* copy subdomain hooks and context over to the subdomain DMs */ 1366 if (dmlist) { 1367 for (i = 0; i < l; i++) { 1368 for (link=dm->subdomainhook; link; link=link->next) { 1369 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1370 } 1371 (*dmlist)[i]->ctx = dm->ctx; 1372 } 1373 } 1374 if (len) *len = l; 1375 } 1376 PetscFunctionReturn(0); 1377 } 1378 1379 1380 #undef __FUNCT__ 1381 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1382 /*@C 1383 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1384 1385 Not collective 1386 1387 Input Parameters: 1388 + dm - the DM object 1389 . n - the number of subdomain scatters 1390 - subdms - the local subdomains 1391 1392 Output Parameters: 1393 + n - the number of scatters returned 1394 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1395 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1396 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1397 1398 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1399 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1400 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1401 solution and residual data. 
1402 1403 Level: developer 1404 1405 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1406 @*/ 1407 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1408 { 1409 PetscErrorCode ierr; 1410 1411 PetscFunctionBegin; 1412 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1413 PetscValidPointer(subdms,3); 1414 if (dm->ops->createddscatters) { 1415 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1416 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1417 PetscFunctionReturn(0); 1418 } 1419 1420 #undef __FUNCT__ 1421 #define __FUNCT__ "DMRefine" 1422 /*@ 1423 DMRefine - Refines a DM object 1424 1425 Collective on DM 1426 1427 Input Parameter: 1428 + dm - the DM object 1429 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1430 1431 Output Parameter: 1432 . 
dmf - the refined DM, or NULL 1433 1434 Note: If no refinement was done, the return value is NULL 1435 1436 Level: developer 1437 1438 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1439 @*/ 1440 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1441 { 1442 PetscErrorCode ierr; 1443 DMRefineHookLink link; 1444 1445 PetscFunctionBegin; 1446 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1447 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1448 if (*dmf) { 1449 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1450 1451 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1452 1453 (*dmf)->ctx = dm->ctx; 1454 (*dmf)->leveldown = dm->leveldown; 1455 (*dmf)->levelup = dm->levelup + 1; 1456 1457 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1458 for (link=dm->refinehook; link; link=link->next) { 1459 if (link->refinehook) { 1460 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1461 } 1462 } 1463 } 1464 PetscFunctionReturn(0); 1465 } 1466 1467 #undef __FUNCT__ 1468 #define __FUNCT__ "DMRefineHookAdd" 1469 /*@C 1470 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1471 1472 Logically Collective 1473 1474 Input Arguments: 1475 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1476 . refinehook - function to run when setting up a coarser level 1477 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1478 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1479 1480 Calling sequence of refinehook: 1481 $ refinehook(DM coarse,DM fine,void *ctx); 1482 1483 + coarse - coarse level DM 1484 . 
fine - fine level DM to interpolate problem to 1485 - ctx - optional user-defined function context 1486 1487 Calling sequence for interphook: 1488 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1489 1490 + coarse - coarse level DM 1491 . interp - matrix interpolating a coarse-level solution to the finer grid 1492 . fine - fine level DM to update 1493 - ctx - optional user-defined function context 1494 1495 Level: advanced 1496 1497 Notes: 1498 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1499 1500 If this function is called multiple times, the hooks will be run in the order they are added. 1501 1502 This function is currently not available from Fortran. 1503 1504 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1505 @*/ 1506 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1507 { 1508 PetscErrorCode ierr; 1509 DMRefineHookLink link,*p; 1510 1511 PetscFunctionBegin; 1512 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1513 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1514 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1515 link->refinehook = refinehook; 1516 link->interphook = interphook; 1517 link->ctx = ctx; 1518 link->next = NULL; 1519 *p = link; 1520 PetscFunctionReturn(0); 1521 } 1522 1523 #undef __FUNCT__ 1524 #define __FUNCT__ "DMInterpolate" 1525 /*@ 1526 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1527 1528 Collective if any hooks are 1529 1530 Input Arguments: 1531 + coarse - coarser DM to use as a base 1532 . 
restrct - interpolation matrix, apply using MatInterpolate() 1533 - fine - finer DM to update 1534 1535 Level: developer 1536 1537 .seealso: DMRefineHookAdd(), MatInterpolate() 1538 @*/ 1539 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1540 { 1541 PetscErrorCode ierr; 1542 DMRefineHookLink link; 1543 1544 PetscFunctionBegin; 1545 for (link=fine->refinehook; link; link=link->next) { 1546 if (link->interphook) { 1547 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1548 } 1549 } 1550 PetscFunctionReturn(0); 1551 } 1552 1553 #undef __FUNCT__ 1554 #define __FUNCT__ "DMGetRefineLevel" 1555 /*@ 1556 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1557 1558 Not Collective 1559 1560 Input Parameter: 1561 . dm - the DM object 1562 1563 Output Parameter: 1564 . level - number of refinements 1565 1566 Level: developer 1567 1568 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1569 1570 @*/ 1571 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1572 { 1573 PetscFunctionBegin; 1574 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1575 *level = dm->levelup; 1576 PetscFunctionReturn(0); 1577 } 1578 1579 #undef __FUNCT__ 1580 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1581 /*@C 1582 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1583 1584 Logically Collective 1585 1586 Input Arguments: 1587 + dm - the DM 1588 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1589 . endhook - function to run after DMGlobalToLocalEnd() has completed 1590 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1591 1592 Calling sequence for beginhook: 1593 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1594 1595 + dm - global DM 1596 . g - global vector 1597 . mode - mode 1598 . 
l - local vector 1599 - ctx - optional user-defined function context 1600 1601 1602 Calling sequence for endhook: 1603 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1604 1605 + global - global DM 1606 - ctx - optional user-defined function context 1607 1608 Level: advanced 1609 1610 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1611 @*/ 1612 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1613 { 1614 PetscErrorCode ierr; 1615 DMGlobalToLocalHookLink link,*p; 1616 1617 PetscFunctionBegin; 1618 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1619 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1620 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1621 link->beginhook = beginhook; 1622 link->endhook = endhook; 1623 link->ctx = ctx; 1624 link->next = NULL; 1625 *p = link; 1626 PetscFunctionReturn(0); 1627 } 1628 1629 #undef __FUNCT__ 1630 #define __FUNCT__ "DMGlobalToLocalBegin" 1631 /*@ 1632 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1633 1634 Neighbor-wise Collective on DM 1635 1636 Input Parameters: 1637 + dm - the DM object 1638 . g - the global vector 1639 . 
mode - INSERT_VALUES or ADD_VALUES 1640 - l - the local vector 1641 1642 1643 Level: beginner 1644 1645 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1646 1647 @*/ 1648 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1649 { 1650 PetscSF sf; 1651 PetscErrorCode ierr; 1652 DMGlobalToLocalHookLink link; 1653 1654 PetscFunctionBegin; 1655 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1656 for (link=dm->gtolhook; link; link=link->next) { 1657 if (link->beginhook) { 1658 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1659 } 1660 } 1661 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1662 if (sf) { 1663 PetscScalar *lArray, *gArray; 1664 1665 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1666 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1667 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1668 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1669 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1670 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1671 } else { 1672 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1673 } 1674 PetscFunctionReturn(0); 1675 } 1676 1677 #undef __FUNCT__ 1678 #define __FUNCT__ "DMGlobalToLocalEnd" 1679 /*@ 1680 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1681 1682 Neighbor-wise Collective on DM 1683 1684 Input Parameters: 1685 + dm - the DM object 1686 . g - the global vector 1687 . 
mode - INSERT_VALUES or ADD_VALUES 1688 - l - the local vector 1689 1690 1691 Level: beginner 1692 1693 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1694 1695 @*/ 1696 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1697 { 1698 PetscSF sf; 1699 PetscErrorCode ierr; 1700 PetscScalar *lArray, *gArray; 1701 DMGlobalToLocalHookLink link; 1702 1703 PetscFunctionBegin; 1704 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1705 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1706 if (sf) { 1707 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1708 1709 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1710 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1711 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1712 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1713 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1714 } else { 1715 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1716 } 1717 for (link=dm->gtolhook; link; link=link->next) { 1718 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1719 } 1720 PetscFunctionReturn(0); 1721 } 1722 1723 #undef __FUNCT__ 1724 #define __FUNCT__ "DMLocalToGlobalBegin" 1725 /*@ 1726 DMLocalToGlobalBegin - updates global vectors from local vectors 1727 1728 Neighbor-wise Collective on DM 1729 1730 Input Parameters: 1731 + dm - the DM object 1732 . l - the local vector 1733 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1734 base point. 1735 - - the global vector 1736 1737 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1738 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1739 global array to the final global array with VecAXPY(). 1740 1741 Level: beginner 1742 1743 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1744 1745 @*/ 1746 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1747 { 1748 PetscSF sf; 1749 PetscErrorCode ierr; 1750 1751 PetscFunctionBegin; 1752 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1753 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1754 if (sf) { 1755 MPI_Op op; 1756 PetscScalar *lArray, *gArray; 1757 1758 switch (mode) { 1759 case INSERT_VALUES: 1760 case INSERT_ALL_VALUES: 1761 op = MPIU_REPLACE; break; 1762 case ADD_VALUES: 1763 case ADD_ALL_VALUES: 1764 op = MPI_SUM; break; 1765 default: 1766 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1767 } 1768 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1769 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1770 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1771 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1772 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1773 } else { 1774 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1775 } 1776 PetscFunctionReturn(0); 1777 } 1778 1779 #undef __FUNCT__ 1780 #define __FUNCT__ "DMLocalToGlobalEnd" 1781 /*@ 1782 DMLocalToGlobalEnd - updates global vectors from local vectors 1783 1784 Neighbor-wise Collective on DM 1785 1786 Input Parameters: 1787 + dm - the DM object 1788 . l - the local vector 1789 . 
mode - INSERT_VALUES or ADD_VALUES 1790 - g - the global vector 1791 1792 1793 Level: beginner 1794 1795 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1796 1797 @*/ 1798 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1799 { 1800 PetscSF sf; 1801 PetscErrorCode ierr; 1802 1803 PetscFunctionBegin; 1804 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1805 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1806 if (sf) { 1807 MPI_Op op; 1808 PetscScalar *lArray, *gArray; 1809 1810 switch (mode) { 1811 case INSERT_VALUES: 1812 case INSERT_ALL_VALUES: 1813 op = MPIU_REPLACE; break; 1814 case ADD_VALUES: 1815 case ADD_ALL_VALUES: 1816 op = MPI_SUM; break; 1817 default: 1818 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1819 } 1820 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1821 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1822 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1823 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1824 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1825 } else { 1826 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1827 } 1828 PetscFunctionReturn(0); 1829 } 1830 1831 #undef __FUNCT__ 1832 #define __FUNCT__ "DMLocalToLocalBegin" 1833 /*@ 1834 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1835 that contain irrelevant values) to another local vector where the ghost 1836 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1837 1838 Neighbor-wise Collective on DM and Vec 1839 1840 Input Parameters: 1841 + dm - the DM object 1842 . g - the original local vector 1843 - mode - one of INSERT_VALUES or ADD_VALUES 1844 1845 Output Parameter: 1846 . 
l - the local vector with correct ghost values 1847 1848 Level: intermediate 1849 1850 Notes: 1851 The local vectors used here need not be the same as those 1852 obtained from DMCreateLocalVector(), BUT they 1853 must have the same parallel data layout; they could, for example, be 1854 obtained with VecDuplicate() from the DM originating vectors. 1855 1856 .keywords: DM, local-to-local, begin 1857 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1858 1859 @*/ 1860 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1861 { 1862 PetscErrorCode ierr; 1863 1864 PetscFunctionBegin; 1865 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1866 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1867 PetscFunctionReturn(0); 1868 } 1869 1870 #undef __FUNCT__ 1871 #define __FUNCT__ "DMLocalToLocalEnd" 1872 /*@ 1873 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1874 that contain irrelevant values) to another local vector where the ghost 1875 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1876 1877 Neighbor-wise Collective on DM and Vec 1878 1879 Input Parameters: 1880 + da - the DM object 1881 . g - the original local vector 1882 - mode - one of INSERT_VALUES or ADD_VALUES 1883 1884 Output Parameter: 1885 . l - the local vector with correct ghost values 1886 1887 Level: intermediate 1888 1889 Notes: 1890 The local vectors used here need not be the same as those 1891 obtained from DMCreateLocalVector(), BUT they 1892 must have the same parallel data layout; they could, for example, be 1893 obtained with VecDuplicate() from the DM originating vectors. 
1894 1895 .keywords: DM, local-to-local, end 1896 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1897 1898 @*/ 1899 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1900 { 1901 PetscErrorCode ierr; 1902 1903 PetscFunctionBegin; 1904 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1905 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1906 PetscFunctionReturn(0); 1907 } 1908 1909 1910 #undef __FUNCT__ 1911 #define __FUNCT__ "DMCoarsen" 1912 /*@ 1913 DMCoarsen - Coarsens a DM object 1914 1915 Collective on DM 1916 1917 Input Parameter: 1918 + dm - the DM object 1919 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1920 1921 Output Parameter: 1922 . dmc - the coarsened DM 1923 1924 Level: developer 1925 1926 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1927 1928 @*/ 1929 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1930 { 1931 PetscErrorCode ierr; 1932 DMCoarsenHookLink link; 1933 1934 PetscFunctionBegin; 1935 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1936 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1937 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1938 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1939 (*dmc)->ctx = dm->ctx; 1940 (*dmc)->levelup = dm->levelup; 1941 (*dmc)->leveldown = dm->leveldown + 1; 1942 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1943 for (link=dm->coarsenhook; link; link=link->next) { 1944 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1945 } 1946 PetscFunctionReturn(0); 1947 } 1948 1949 #undef __FUNCT__ 1950 #define __FUNCT__ "DMCoarsenHookAdd" 1951 /*@C 1952 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.
inject - injection matrix, also use MatRestrict() 2021 - coarse - coarer DM to update 2022 2023 Level: developer 2024 2025 .seealso: DMCoarsenHookAdd(), MatRestrict() 2026 @*/ 2027 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2028 { 2029 PetscErrorCode ierr; 2030 DMCoarsenHookLink link; 2031 2032 PetscFunctionBegin; 2033 for (link=fine->coarsenhook; link; link=link->next) { 2034 if (link->restricthook) { 2035 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2036 } 2037 } 2038 PetscFunctionReturn(0); 2039 } 2040 2041 #undef __FUNCT__ 2042 #define __FUNCT__ "DMSubDomainHookAdd" 2043 /*@C 2044 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2045 2046 Logically Collective 2047 2048 Input Arguments: 2049 + global - global DM 2050 . ddhook - function to run to pass data to the decomposition DM upon its creation 2051 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2052 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2053 2054 2055 Calling sequence for ddhook: 2056 $ ddhook(DM global,DM block,void *ctx) 2057 2058 + global - global DM 2059 . block - block DM 2060 - ctx - optional user-defined function context 2061 2062 Calling sequence for restricthook: 2063 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2064 2065 + global - global DM 2066 . out - scatter to the outer (with ghost and overlap points) block vector 2067 . in - scatter to block vector values only owned locally 2068 . block - block DM 2069 - ctx - optional user-defined function context 2070 2071 Level: advanced 2072 2073 Notes: 2074 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2075 2076 If this function is called multiple times, the hooks will be run in the order they are added. 

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2113 - coarse - coarer DM to update 2114 2115 Level: developer 2116 2117 .seealso: DMCoarsenHookAdd(), MatRestrict() 2118 @*/ 2119 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2120 { 2121 PetscErrorCode ierr; 2122 DMSubDomainHookLink link; 2123 2124 PetscFunctionBegin; 2125 for (link=global->subdomainhook; link; link=link->next) { 2126 if (link->restricthook) { 2127 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2128 } 2129 } 2130 PetscFunctionReturn(0); 2131 } 2132 2133 #undef __FUNCT__ 2134 #define __FUNCT__ "DMGetCoarsenLevel" 2135 /*@ 2136 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2137 2138 Not Collective 2139 2140 Input Parameter: 2141 . dm - the DM object 2142 2143 Output Parameter: 2144 . level - number of coarsenings 2145 2146 Level: developer 2147 2148 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2149 2150 @*/ 2151 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2152 { 2153 PetscFunctionBegin; 2154 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2155 *level = dm->leveldown; 2156 PetscFunctionReturn(0); 2157 } 2158 2159 2160 2161 #undef __FUNCT__ 2162 #define __FUNCT__ "DMRefineHierarchy" 2163 /*@C 2164 DMRefineHierarchy - Refines a DM object, all levels at once 2165 2166 Collective on DM 2167 2168 Input Parameter: 2169 + dm - the DM object 2170 - nlevels - the number of levels of refinement 2171 2172 Output Parameter: 2173 . 
dmf - the refined DM hierarchy 2174 2175 Level: developer 2176 2177 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2178 2179 @*/ 2180 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2181 { 2182 PetscErrorCode ierr; 2183 2184 PetscFunctionBegin; 2185 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2186 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2187 if (nlevels == 0) PetscFunctionReturn(0); 2188 if (dm->ops->refinehierarchy) { 2189 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2190 } else if (dm->ops->refine) { 2191 PetscInt i; 2192 2193 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2194 for (i=1; i<nlevels; i++) { 2195 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2196 } 2197 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2198 PetscFunctionReturn(0); 2199 } 2200 2201 #undef __FUNCT__ 2202 #define __FUNCT__ "DMCoarsenHierarchy" 2203 /*@C 2204 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2205 2206 Collective on DM 2207 2208 Input Parameter: 2209 + dm - the DM object 2210 - nlevels - the number of levels of coarsening 2211 2212 Output Parameter: 2213 . 
dmc - the coarsened DM hierarchy 2214 2215 Level: developer 2216 2217 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2218 2219 @*/ 2220 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2221 { 2222 PetscErrorCode ierr; 2223 2224 PetscFunctionBegin; 2225 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2226 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2227 if (nlevels == 0) PetscFunctionReturn(0); 2228 PetscValidPointer(dmc,3); 2229 if (dm->ops->coarsenhierarchy) { 2230 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2231 } else if (dm->ops->coarsen) { 2232 PetscInt i; 2233 2234 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2235 for (i=1; i<nlevels; i++) { 2236 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2237 } 2238 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2239 PetscFunctionReturn(0); 2240 } 2241 2242 #undef __FUNCT__ 2243 #define __FUNCT__ "DMCreateAggregates" 2244 /*@ 2245 DMCreateAggregates - Gets the aggregates that map between 2246 grids associated with two DMs. 2247 2248 Collective on DM 2249 2250 Input Parameters: 2251 + dmc - the coarse grid DM 2252 - dmf - the fine grid DM 2253 2254 Output Parameters: 2255 . 
rest - the restriction matrix (transpose of the projection matrix) 2256 2257 Level: intermediate 2258 2259 .keywords: interpolation, restriction, multigrid 2260 2261 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2262 @*/ 2263 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2264 { 2265 PetscErrorCode ierr; 2266 2267 PetscFunctionBegin; 2268 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2269 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2270 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2271 PetscFunctionReturn(0); 2272 } 2273 2274 #undef __FUNCT__ 2275 #define __FUNCT__ "DMSetApplicationContextDestroy" 2276 /*@C 2277 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2278 2279 Not Collective 2280 2281 Input Parameters: 2282 + dm - the DM object 2283 - destroy - the destroy function 2284 2285 Level: intermediate 2286 2287 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2288 2289 @*/ 2290 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2291 { 2292 PetscFunctionBegin; 2293 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2294 dm->ctxdestroy = destroy; 2295 PetscFunctionReturn(0); 2296 } 2297 2298 #undef __FUNCT__ 2299 #define __FUNCT__ "DMSetApplicationContext" 2300 /*@ 2301 DMSetApplicationContext - Set a user context into a DM object 2302 2303 Not Collective 2304 2305 Input Parameters: 2306 + dm - the DM object 2307 - ctx - the user context 2308 2309 Level: intermediate 2310 2311 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2312 2313 @*/ 2314 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2315 { 2316 PetscFunctionBegin; 2317 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2318 dm->ctx = ctx; 2319 PetscFunctionReturn(0); 2320 } 2321 
2322 #undef __FUNCT__ 2323 #define __FUNCT__ "DMGetApplicationContext" 2324 /*@ 2325 DMGetApplicationContext - Gets a user context from a DM object 2326 2327 Not Collective 2328 2329 Input Parameter: 2330 . dm - the DM object 2331 2332 Output Parameter: 2333 . ctx - the user context 2334 2335 Level: intermediate 2336 2337 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2338 2339 @*/ 2340 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2341 { 2342 PetscFunctionBegin; 2343 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2344 *(void**)ctx = dm->ctx; 2345 PetscFunctionReturn(0); 2346 } 2347 2348 #undef __FUNCT__ 2349 #define __FUNCT__ "DMSetVariableBounds" 2350 /*@C 2351 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2352 2353 Logically Collective on DM 2354 2355 Input Parameter: 2356 + dm - the DM object 2357 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2358 2359 Level: intermediate 2360 2361 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2362 DMSetJacobian() 2363 2364 @*/ 2365 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2366 { 2367 PetscFunctionBegin; 2368 dm->ops->computevariablebounds = f; 2369 PetscFunctionReturn(0); 2370 } 2371 2372 #undef __FUNCT__ 2373 #define __FUNCT__ "DMHasVariableBounds" 2374 /*@ 2375 DMHasVariableBounds - does the DM object have a variable bounds function? 2376 2377 Not Collective 2378 2379 Input Parameter: 2380 . dm - the DM object to destroy 2381 2382 Output Parameter: 2383 . 
flg - PETSC_TRUE if the variable bounds function exists 2384 2385 Level: developer 2386 2387 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2388 2389 @*/ 2390 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2391 { 2392 PetscFunctionBegin; 2393 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2394 PetscFunctionReturn(0); 2395 } 2396 2397 #undef __FUNCT__ 2398 #define __FUNCT__ "DMComputeVariableBounds" 2399 /*@C 2400 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2401 2402 Logically Collective on DM 2403 2404 Input Parameters: 2405 + dm - the DM object to destroy 2406 - x - current solution at which the bounds are computed 2407 2408 Output parameters: 2409 + xl - lower bound 2410 - xu - upper bound 2411 2412 Level: intermediate 2413 2414 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2415 2416 @*/ 2417 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2418 { 2419 PetscErrorCode ierr; 2420 2421 PetscFunctionBegin; 2422 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2423 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2424 if (dm->ops->computevariablebounds) { 2425 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2426 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2427 PetscFunctionReturn(0); 2428 } 2429 2430 #undef __FUNCT__ 2431 #define __FUNCT__ "DMHasColoring" 2432 /*@ 2433 DMHasColoring - does the DM object have a method of providing a coloring? 2434 2435 Not Collective 2436 2437 Input Parameter: 2438 . dm - the DM object 2439 2440 Output Parameter: 2441 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2442 2443 Level: developer 2444 2445 .seealso DMHasFunction(), DMCreateColoring() 2446 2447 @*/ 2448 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2449 { 2450 PetscFunctionBegin; 2451 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2452 PetscFunctionReturn(0); 2453 } 2454 2455 #undef __FUNCT__ 2456 #define __FUNCT__ "DMSetVec" 2457 /*@C 2458 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2459 2460 Collective on DM 2461 2462 Input Parameter: 2463 + dm - the DM object 2464 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2465 2466 Level: developer 2467 2468 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2469 2470 @*/ 2471 PetscErrorCode DMSetVec(DM dm,Vec x) 2472 { 2473 PetscErrorCode ierr; 2474 2475 PetscFunctionBegin; 2476 if (x) { 2477 if (!dm->x) { 2478 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2479 } 2480 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2481 } else if (dm->x) { 2482 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2483 } 2484 PetscFunctionReturn(0); 2485 } 2486 2487 PetscFunctionList DMList = NULL; 2488 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2489 2490 #undef __FUNCT__ 2491 #define __FUNCT__ "DMSetType" 2492 /*@C 2493 DMSetType - Builds a DM, for a particular DM implementation. 2494 2495 Collective on DM 2496 2497 Input Parameters: 2498 + dm - The DM object 2499 - method - The name of the DM type 2500 2501 Options Database Key: 2502 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2503 2504 Notes: 2505 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2506 2507 Level: intermediate 2508 2509 .keywords: DM, set, type 2510 .seealso: DMGetType(), DMCreate() 2511 @*/ 2512 PetscErrorCode DMSetType(DM dm, DMType method) 2513 { 2514 PetscErrorCode (*r)(DM); 2515 PetscBool match; 2516 PetscErrorCode ierr; 2517 2518 PetscFunctionBegin; 2519 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2520 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2521 if (match) PetscFunctionReturn(0); 2522 2523 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2524 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2525 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2526 2527 if (dm->ops->destroy) { 2528 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2529 dm->ops->destroy = NULL; 2530 } 2531 ierr = (*r)(dm);CHKERRQ(ierr); 2532 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2533 PetscFunctionReturn(0); 2534 } 2535 2536 #undef __FUNCT__ 2537 #define __FUNCT__ "DMGetType" 2538 /*@C 2539 DMGetType - Gets the DM type name (as a string) from the DM. 2540 2541 Not Collective 2542 2543 Input Parameter: 2544 . dm - The DM 2545 2546 Output Parameter: 2547 . type - The DM type name 2548 2549 Level: intermediate 2550 2551 .keywords: DM, get, type, name 2552 .seealso: DMSetType(), DMCreate() 2553 @*/ 2554 PetscErrorCode DMGetType(DM dm, DMType *type) 2555 { 2556 PetscErrorCode ierr; 2557 2558 PetscFunctionBegin; 2559 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2560 PetscValidCharPointer(type,2); 2561 if (!DMRegisterAllCalled) { 2562 ierr = DMRegisterAll();CHKERRQ(ierr); 2563 } 2564 *type = ((PetscObject)dm)->type_name; 2565 PetscFunctionReturn(0); 2566 } 2567 2568 #undef __FUNCT__ 2569 #define __FUNCT__ "DMConvert" 2570 /*@C 2571 DMConvert - Converts a DM to another DM, either of the same or different type. 
2572 2573 Collective on DM 2574 2575 Input Parameters: 2576 + dm - the DM 2577 - newtype - new DM type (use "same" for the same type) 2578 2579 Output Parameter: 2580 . M - pointer to new DM 2581 2582 Notes: 2583 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2584 the MPI communicator of the generated DM is always the same as the communicator 2585 of the input DM. 2586 2587 Level: intermediate 2588 2589 .seealso: DMCreate() 2590 @*/ 2591 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2592 { 2593 DM B; 2594 char convname[256]; 2595 PetscBool sametype, issame; 2596 PetscErrorCode ierr; 2597 2598 PetscFunctionBegin; 2599 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2600 PetscValidType(dm,1); 2601 PetscValidPointer(M,3); 2602 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2603 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2604 { 2605 PetscErrorCode (*conv)(DM, DMType, DM*) = NULL; 2606 2607 /* 2608 Order of precedence: 2609 1) See if a specialized converter is known to the current DM. 2610 2) See if a specialized converter is known to the desired DM class. 2611 3) See if a good general converter is registered for the desired class 2612 4) See if a good general converter is known for the current matrix. 2613 5) Use a really basic converter. 2614 */ 2615 2616 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2617 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2618 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2619 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2620 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2621 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2622 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr); 2623 if (conv) goto foundconv; 2624 2625 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2626 ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr); 2627 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2628 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2629 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2630 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2631 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2632 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2633 ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr); 2634 if (conv) { 2635 ierr = DMDestroy(&B);CHKERRQ(ierr); 2636 goto foundconv; 2637 } 2638 2639 #if 0 2640 /* 3) See if a good general converter is registered for the desired class */ 2641 conv = B->ops->convertfrom; 2642 ierr = DMDestroy(&B);CHKERRQ(ierr); 2643 if (conv) goto foundconv; 2644 2645 /* 4) See if a good general converter is known for the current matrix */ 2646 if (dm->ops->convert) { 2647 conv = dm->ops->convert; 2648 } 2649 if (conv) goto foundconv; 2650 #endif 2651 2652 /* 5) Use a really basic converter. 
*/ 2653 SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2654 2655 foundconv: 2656 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2657 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2658 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2659 } 2660 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2661 PetscFunctionReturn(0); 2662 } 2663 2664 /*--------------------------------------------------------------------------------------------------------------------*/ 2665 2666 #undef __FUNCT__ 2667 #define __FUNCT__ "DMRegister" 2668 /*@C 2669 DMRegister - Adds a new DM component implementation 2670 2671 Not Collective 2672 2673 Input Parameters: 2674 + name - The name of a new user-defined creation routine 2675 - create_func - The creation routine itself 2676 2677 Notes: 2678 DMRegister() may be called multiple times to add several user-defined DMs 2679 2680 2681 Sample usage: 2682 .vb 2683 DMRegister("my_da", MyDMCreate); 2684 .ve 2685 2686 Then, your DM type can be chosen with the procedural interface via 2687 .vb 2688 DMCreate(MPI_Comm, DM *); 2689 DMSetType(DM,"my_da"); 2690 .ve 2691 or at runtime via the option 2692 .vb 2693 -da_type my_da 2694 .ve 2695 2696 Level: advanced 2697 2698 .keywords: DM, register 2699 .seealso: DMRegisterAll(), DMRegisterDestroy() 2700 2701 @*/ 2702 PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM)) 2703 { 2704 PetscErrorCode ierr; 2705 2706 PetscFunctionBegin; 2707 ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr); 2708 PetscFunctionReturn(0); 2709 } 2710 2711 #undef __FUNCT__ 2712 #define __FUNCT__ "DMLoad" 2713 /*@C 2714 DMLoad - Loads a DM that has been stored in binary with DMView(). 

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream starts with a classid sentinel followed by the type name;
     both are checked/consumed before dispatching to the type-specific loader */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* The type-specific load routine, if any, reads the remainder of the DM data */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellVector"
/* Debugging helper: print the real parts of a cell's element vector, one entry per line, to stdout */
PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < len; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellMatrix"
/* Debugging helper: print the real parts of a cell's rows x cols element matrix (row-major in A) to stdout */
PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
{
  PetscInt       f, g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < rows; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
    for (g = 0; g < cols; ++g) {
      ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old section so passing the currently-stored section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field names onto the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Built lazily from the local section and the point SF, then cached until invalidated */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2907 2908 Input Parameters: 2909 + dm - The DM 2910 - section - The PetscSection, or NULL 2911 2912 Level: intermediate 2913 2914 Note: Any existing Section will be destroyed 2915 2916 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 2917 @*/ 2918 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) 2919 { 2920 PetscErrorCode ierr; 2921 2922 PetscFunctionBegin; 2923 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2924 if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2); 2925 ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr); 2926 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2927 dm->defaultGlobalSection = section; 2928 PetscFunctionReturn(0); 2929 } 2930 2931 #undef __FUNCT__ 2932 #define __FUNCT__ "DMGetDefaultSF" 2933 /*@ 2934 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2935 it is created from the default PetscSection layouts in the DM. 2936 2937 Input Parameter: 2938 . dm - The DM 2939 2940 Output Parameter: 2941 . sf - The PetscSF 2942 2943 Level: intermediate 2944 2945 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
2946 2947 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 2948 @*/ 2949 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) 2950 { 2951 PetscInt nroots; 2952 PetscErrorCode ierr; 2953 2954 PetscFunctionBegin; 2955 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2956 PetscValidPointer(sf, 2); 2957 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr); 2958 if (nroots < 0) { 2959 PetscSection section, gSection; 2960 2961 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 2962 if (section) { 2963 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 2964 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 2965 } else { 2966 *sf = NULL; 2967 PetscFunctionReturn(0); 2968 } 2969 } 2970 *sf = dm->defaultSF; 2971 PetscFunctionReturn(0); 2972 } 2973 2974 #undef __FUNCT__ 2975 #define __FUNCT__ "DMSetDefaultSF" 2976 /*@ 2977 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 2978 2979 Input Parameters: 2980 + dm - The DM 2981 - sf - The PetscSF 2982 2983 Level: intermediate 2984 2985 Note: Any previous SF is destroyed 2986 2987 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 2988 @*/ 2989 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) 2990 { 2991 PetscErrorCode ierr; 2992 2993 PetscFunctionBegin; 2994 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2995 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 2996 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 2997 dm->defaultSF = sf; 2998 PetscFunctionReturn(0); 2999 } 3000 3001 #undef __FUNCT__ 3002 #define __FUNCT__ "DMCreateDefaultSF" 3003 /*@C 3004 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3005 describing the data layout. 3006 3007 Input Parameters: 3008 + dm - The DM 3009 . 
localSection - PetscSection describing the local data layout 3010 - globalSection - PetscSection describing the global data layout 3011 3012 Level: intermediate 3013 3014 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3015 @*/ 3016 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3017 { 3018 MPI_Comm comm; 3019 PetscLayout layout; 3020 const PetscInt *ranges; 3021 PetscInt *local; 3022 PetscSFNode *remote; 3023 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3024 PetscMPIInt size, rank; 3025 PetscErrorCode ierr; 3026 3027 PetscFunctionBegin; 3028 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 3029 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3030 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3031 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3032 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3033 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3034 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3035 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3036 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3037 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3038 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3039 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3040 PetscInt gdof, gcdof; 3041 3042 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3043 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3044 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3045 } 3046 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3047 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3048 for (p = pStart, l = 0; p < pEnd; ++p) { 3049 const PetscInt *cind; 3050 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3051 3052 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3053 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3054 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3055 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3056 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3057 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3058 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3059 if (!gdof) continue; /* Censored point */ 3060 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3061 if (gsize != dof-cdof) { 3062 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3063 cdof = 0; /* Ignore constraints */ 3064 } 3065 for (d = 0, c = 0; d < dof; ++d) { 3066 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3067 local[l+d-c] = off+d; 3068 } 3069 if (gdof < 0) { 3070 for (d = 0; d < gsize; ++d, ++l) { 3071 PetscInt offset = -(goff+1) + d, r; 3072 3073 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 3074 if (r < 0) r = -(r+2); 3075 remote[l].rank = r; 3076 remote[l].index = offset - ranges[r]; 3077 } 3078 } else { 3079 for (d = 0; d < gsize; ++d, ++l) { 3080 remote[l].rank = rank; 3081 remote[l].index = goff+d - ranges[rank]; 3082 } 3083 } 3084 } 3085 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3086 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3087 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3088 PetscFunctionReturn(0); 3089 } 3090 3091 #undef __FUNCT__ 3092 #define __FUNCT__ "DMGetPointSF" 3093 /*@ 3094 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3095 3096 Input Parameter: 3097 . dm - The DM 3098 3099 Output Parameter: 3100 . sf - The PetscSF 3101 3102 Level: intermediate 3103 3104 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3105 3106 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3107 @*/ 3108 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3109 { 3110 PetscFunctionBegin; 3111 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3112 PetscValidPointer(sf, 2); 3113 *sf = dm->sf; 3114 PetscFunctionReturn(0); 3115 } 3116 3117 #undef __FUNCT__ 3118 #define __FUNCT__ "DMSetPointSF" 3119 /*@ 3120 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3121 3122 Input Parameters: 3123 + dm - The DM 3124 - sf - The PetscSF 3125 3126 Level: intermediate 3127 3128 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3129 @*/ 3130 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3131 { 3132 PetscErrorCode ierr; 3133 3134 PetscFunctionBegin; 3135 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3136 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3137 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3138 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3139 dm->sf = sf; 3140 PetscFunctionReturn(0); 3141 } 3142 3143 #undef __FUNCT__ 3144 #define __FUNCT__ "DMGetNumFields" 3145 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3146 { 3147 PetscFunctionBegin; 3148 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3149 PetscValidPointer(numFields, 2); 3150 *numFields = dm->numFields; 3151 PetscFunctionReturn(0); 3152 } 3153 3154 #undef __FUNCT__ 3155 #define __FUNCT__ "DMSetNumFields" 3156 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3157 { 3158 PetscInt f; 3159 PetscErrorCode ierr; 3160 3161 PetscFunctionBegin; 3162 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3163 for (f = 0; f < dm->numFields; ++f) { 3164 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3165 } 3166 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3167 dm->numFields = numFields; 3168 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3169 for (f = 0; f < dm->numFields; ++f) { 3170 ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr); 3171 } 3172 PetscFunctionReturn(0); 3173 } 3174 3175 #undef __FUNCT__ 3176 #define __FUNCT__ "DMGetField" 3177 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3178 { 3179 PetscFunctionBegin; 3180 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3181 PetscValidPointer(field, 2); 3182 if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. 
Call DMSetNumFields()"); 3183 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3184 *field = dm->fields[f]; 3185 PetscFunctionReturn(0); 3186 } 3187 3188 #undef __FUNCT__ 3189 #define __FUNCT__ "DMRestrictHook_Coordinates" 3190 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx) 3191 { 3192 DM dm_coord,dmc_coord; 3193 PetscErrorCode ierr; 3194 Vec coords,ccoords; 3195 VecScatter scat; 3196 PetscFunctionBegin; 3197 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3198 ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr); 3199 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3200 ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr); 3201 if (coords && !ccoords) { 3202 ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr); 3203 ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr); 3204 ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3205 ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3206 ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr); 3207 ierr = VecScatterDestroy(&scat);CHKERRQ(ierr); 3208 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3209 } 3210 PetscFunctionReturn(0); 3211 } 3212 3213 #undef __FUNCT__ 3214 #define __FUNCT__ "DMSubDomainHook_Coordinates" 3215 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx) 3216 { 3217 DM dm_coord,subdm_coord; 3218 PetscErrorCode ierr; 3219 Vec coords,ccoords,clcoords; 3220 VecScatter *scat_i,*scat_g; 3221 PetscFunctionBegin; 3222 ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr); 3223 ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr); 3224 ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr); 3225 ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr); 3226 if (coords && !ccoords) { 3227 ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr); 3228 ierr = 
DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr); 3229 ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr); 3230 ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3231 ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3232 ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3233 ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 3234 ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr); 3235 ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr); 3236 ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr); 3237 ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr); 3238 ierr = VecDestroy(&ccoords);CHKERRQ(ierr); 3239 ierr = VecDestroy(&clcoords);CHKERRQ(ierr); 3240 ierr = PetscFree(scat_i);CHKERRQ(ierr); 3241 ierr = PetscFree(scat_g);CHKERRQ(ierr); 3242 } 3243 PetscFunctionReturn(0); 3244 } 3245 3246 #undef __FUNCT__ 3247 #define __FUNCT__ "DMSetCoordinates" 3248 /*@ 3249 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3250 3251 Collective on DM 3252 3253 Input Parameters: 3254 + dm - the DM 3255 - c - coordinate vector 3256 3257 Note: 3258 The coordinates do include those for ghost points, which are in the local vector 3259 3260 Level: intermediate 3261 3262 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3263 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3264 @*/ 3265 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3266 { 3267 PetscErrorCode ierr; 3268 3269 PetscFunctionBegin; 3270 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3271 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3272 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3273 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3274 dm->coordinates = c; 3275 ierr = 
VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3276 ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3277 ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr); 3278 PetscFunctionReturn(0); 3279 } 3280 3281 #undef __FUNCT__ 3282 #define __FUNCT__ "DMSetCoordinatesLocal" 3283 /*@ 3284 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3285 3286 Collective on DM 3287 3288 Input Parameters: 3289 + dm - the DM 3290 - c - coordinate vector 3291 3292 Note: 3293 The coordinates of ghost points can be set using DMSetCoordinates() 3294 followed by DMGetCoordinatesLocal(). This is intended to enable the 3295 setting of ghost coordinates outside of the domain. 3296 3297 Level: intermediate 3298 3299 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3300 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3301 @*/ 3302 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3303 { 3304 PetscErrorCode ierr; 3305 3306 PetscFunctionBegin; 3307 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3308 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3309 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3310 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3311 3312 dm->coordinatesLocal = c; 3313 3314 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3315 PetscFunctionReturn(0); 3316 } 3317 3318 #undef __FUNCT__ 3319 #define __FUNCT__ "DMGetCoordinates" 3320 /*@ 3321 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3322 3323 Not Collective 3324 3325 Input Parameter: 3326 . dm - the DM 3327 3328 Output Parameter: 3329 . c - global coordinate vector 3330 3331 Note: 3332 This is a borrowed reference, so the user should NOT destroy this vector 3333 3334 Each process has only the local coordinates (does NOT have the ghost coordinates). 
3335 3336 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3337 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3338 3339 Level: intermediate 3340 3341 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3342 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3343 @*/ 3344 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3345 { 3346 PetscErrorCode ierr; 3347 3348 PetscFunctionBegin; 3349 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3350 PetscValidPointer(c,2); 3351 if (!dm->coordinates && dm->coordinatesLocal) { 3352 DM cdm = NULL; 3353 3354 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3355 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3356 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3357 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3358 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3359 } 3360 *c = dm->coordinates; 3361 PetscFunctionReturn(0); 3362 } 3363 3364 #undef __FUNCT__ 3365 #define __FUNCT__ "DMGetCoordinatesLocal" 3366 /*@ 3367 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3368 3369 Collective on DM 3370 3371 Input Parameter: 3372 . dm - the DM 3373 3374 Output Parameter: 3375 . c - coordinate vector 3376 3377 Note: 3378 This is a borrowed reference, so the user should NOT destroy this vector 3379 3380 Each process has the local and ghost coordinates 3381 3382 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3383 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3384 3385 Level: intermediate 3386 3387 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3388 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3389 @*/ 3390 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3391 { 3392 PetscErrorCode ierr; 3393 3394 PetscFunctionBegin; 3395 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3396 PetscValidPointer(c,2); 3397 if (!dm->coordinatesLocal && dm->coordinates) { 3398 DM cdm = NULL; 3399 3400 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3401 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3402 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3403 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3404 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3405 } 3406 *c = dm->coordinatesLocal; 3407 PetscFunctionReturn(0); 3408 } 3409 3410 #undef __FUNCT__ 3411 #define __FUNCT__ "DMGetCoordinateDM" 3412 /*@ 3413 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3414 3415 Collective on DM 3416 3417 Input Parameter: 3418 . dm - the DM 3419 3420 Output Parameter: 3421 . 
cdm - coordinate DM 3422 3423 Level: intermediate 3424 3425 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3426 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3427 @*/ 3428 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3429 { 3430 PetscErrorCode ierr; 3431 3432 PetscFunctionBegin; 3433 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3434 PetscValidPointer(cdm,2); 3435 if (!dm->coordinateDM) { 3436 if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3437 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3438 } 3439 *cdm = dm->coordinateDM; 3440 PetscFunctionReturn(0); 3441 } 3442 3443 #undef __FUNCT__ 3444 #define __FUNCT__ "DMLocatePoints" 3445 /*@ 3446 DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells 3447 3448 Not collective 3449 3450 Input Parameters: 3451 + dm - The DM 3452 - v - The Vec of points 3453 3454 Output Parameter: 3455 . cells - The local cell numbers for cells which contain the points 3456 3457 Level: developer 3458 3459 .keywords: point location, mesh 3460 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3461 @*/ 3462 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells) 3463 { 3464 PetscErrorCode ierr; 3465 3466 PetscFunctionBegin; 3467 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3468 PetscValidHeaderSpecific(v,VEC_CLASSID,2); 3469 PetscValidPointer(cells,3); 3470 if (dm->ops->locatepoints) { 3471 ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr); 3472 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM"); 3473 PetscFunctionReturn(0); 3474 } 3475