1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 #include <petscsf.h> 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMViewFromOptions" 9 /* 10 DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed. 11 12 Collective on Vec 13 14 Input Parameters: 15 + dm - the DM 16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm' 17 - optionname - option to activate viewing 18 19 Level: intermediate 20 21 .keywords: DM, view, options, database 22 .seealso: VecViewFromOptions(), MatViewFromOptions() 23 */ 24 PetscErrorCode DMViewFromOptions(DM dm,const char prefix[],const char optionname[]) 25 { 26 PetscErrorCode ierr; 27 PetscBool flg; 28 PetscViewer viewer; 29 PetscViewerFormat format; 30 31 PetscFunctionBegin; 32 if (prefix) { 33 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 34 } else { 35 ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr); 36 } 37 if (flg) { 38 ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr); 39 ierr = DMView(dm,viewer);CHKERRQ(ierr); 40 ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr); 41 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 42 } 43 PetscFunctionReturn(0); 44 } 45 46 47 #undef __FUNCT__ 48 #define __FUNCT__ "DMCreate" 49 /*@ 50 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 51 52 If you never call DMSetType() it will generate an 53 error when you try to use the vector. 54 55 Collective on MPI_Comm 56 57 Input Parameter: 58 . comm - The communicator for the DM object 59 60 Output Parameter: 61 . 
dm - The DM object 62 63 Level: beginner 64 65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 66 @*/ 67 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 68 { 69 DM v; 70 PetscErrorCode ierr; 71 72 PetscFunctionBegin; 73 PetscValidPointer(dm,2); 74 *dm = NULL; 75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 76 ierr = VecInitializePackage();CHKERRQ(ierr); 77 ierr = MatInitializePackage();CHKERRQ(ierr); 78 ierr = DMInitializePackage();CHKERRQ(ierr); 79 #endif 80 81 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 82 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 83 84 85 v->ltogmap = NULL; 86 v->ltogmapb = NULL; 87 v->bs = 1; 88 v->coloringtype = IS_COLORING_GLOBAL; 89 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 90 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 91 v->defaultSection = NULL; 92 v->defaultGlobalSection = NULL; 93 { 94 PetscInt i; 95 for (i = 0; i < 10; ++i) { 96 v->nullspaceConstructors[i] = NULL; 97 } 98 } 99 v->numFields = 0; 100 v->fields = NULL; 101 102 *dm = v; 103 PetscFunctionReturn(0); 104 } 105 106 #undef __FUNCT__ 107 #define __FUNCT__ "DMClone" 108 /*@ 109 DMClone - Creates a DM object with the same topology as the original. 110 111 Collective on MPI_Comm 112 113 Input Parameter: 114 . dm - The original DM object 115 116 Output Parameter: 117 . 
newdm - The new DM object 118 119 Level: beginner 120 121 .keywords: DM, topology, create 122 @*/ 123 PetscErrorCode DMClone(DM dm, DM *newdm) 124 { 125 PetscSF sf; 126 Vec coords; 127 void *ctx; 128 PetscErrorCode ierr; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 132 PetscValidPointer(newdm,2); 133 ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr); 134 if (dm->ops->clone) { 135 ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr); 136 } 137 ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr); 138 ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr); 139 ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr); 140 ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr); 141 ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr); 142 if (coords) { 143 ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr); 144 } else { 145 ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr); 146 if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);} 147 } 148 PetscFunctionReturn(0); 149 } 150 151 #undef __FUNCT__ 152 #define __FUNCT__ "DMSetVecType" 153 /*@C 154 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 155 156 Logically Collective on DMDA 157 158 Input Parameter: 159 + da - initial distributed array 160 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 161 162 Options Database: 163 . 
-dm_vec_type ctype 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 168 @*/ 169 PetscErrorCode DMSetVecType(DM da,VecType ctype) 170 { 171 PetscErrorCode ierr; 172 173 PetscFunctionBegin; 174 PetscValidHeaderSpecific(da,DM_CLASSID,1); 175 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 176 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 177 PetscFunctionReturn(0); 178 } 179 180 #undef __FUNCT__ 181 #define __FUNCT__ "VecGetDM" 182 /*@ 183 VecGetDM - Gets the DM defining the data layout of the vector 184 185 Not collective 186 187 Input Parameter: 188 . v - The Vec 189 190 Output Parameter: 191 . dm - The DM 192 193 Level: intermediate 194 195 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 196 @*/ 197 PetscErrorCode VecGetDM(Vec v, DM *dm) 198 { 199 PetscErrorCode ierr; 200 201 PetscFunctionBegin; 202 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 203 PetscValidPointer(dm,2); 204 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 205 PetscFunctionReturn(0); 206 } 207 208 #undef __FUNCT__ 209 #define __FUNCT__ "VecSetDM" 210 /*@ 211 VecSetDM - Sets the DM defining the data layout of the vector 212 213 Not collective 214 215 Input Parameters: 216 + v - The Vec 217 - dm - The DM 218 219 Level: intermediate 220 221 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 222 @*/ 223 PetscErrorCode VecSetDM(Vec v, DM dm) 224 { 225 PetscErrorCode ierr; 226 227 PetscFunctionBegin; 228 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 229 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 230 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 231 PetscFunctionReturn(0); 232 } 233 234 #undef __FUNCT__ 235 #define __FUNCT__ "DMSetMatType" 236 /*@C 237 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 238 239 Logically Collective on 
DM 240 241 Input Parameter: 242 + dm - the DM context 243 . ctype - the matrix type 244 245 Options Database: 246 . -dm_mat_type ctype 247 248 Level: intermediate 249 250 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 251 @*/ 252 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 253 { 254 PetscErrorCode ierr; 255 256 PetscFunctionBegin; 257 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 258 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 259 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 260 PetscFunctionReturn(0); 261 } 262 263 #undef __FUNCT__ 264 #define __FUNCT__ "MatGetDM" 265 /*@ 266 MatGetDM - Gets the DM defining the data layout of the matrix 267 268 Not collective 269 270 Input Parameter: 271 . A - The Mat 272 273 Output Parameter: 274 . dm - The DM 275 276 Level: intermediate 277 278 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 279 @*/ 280 PetscErrorCode MatGetDM(Mat A, DM *dm) 281 { 282 PetscErrorCode ierr; 283 284 PetscFunctionBegin; 285 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 286 PetscValidPointer(dm,2); 287 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr); 288 PetscFunctionReturn(0); 289 } 290 291 #undef __FUNCT__ 292 #define __FUNCT__ "MatSetDM" 293 /*@ 294 MatSetDM - Sets the DM defining the data layout of the matrix 295 296 Not collective 297 298 Input Parameters: 299 + A - The Mat 300 - dm - The DM 301 302 Level: intermediate 303 304 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 305 @*/ 306 PetscErrorCode MatSetDM(Mat A, DM dm) 307 { 308 PetscErrorCode ierr; 309 310 PetscFunctionBegin; 311 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 312 if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2); 313 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 314 PetscFunctionReturn(0); 315 } 316 317 #undef __FUNCT__ 318 #define __FUNCT__ "DMSetOptionsPrefix" 319 /*@C 320 DMSetOptionsPrefix - 
Sets the prefix used for searching for all 321 DMDA options in the database. 322 323 Logically Collective on DMDA 324 325 Input Parameter: 326 + da - the DMDA context 327 - prefix - the prefix to prepend to all option names 328 329 Notes: 330 A hyphen (-) must NOT be given at the beginning of the prefix name. 331 The first character of all runtime options is AUTOMATICALLY the hyphen. 332 333 Level: advanced 334 335 .keywords: DMDA, set, options, prefix, database 336 337 .seealso: DMSetFromOptions() 338 @*/ 339 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 340 { 341 PetscErrorCode ierr; 342 343 PetscFunctionBegin; 344 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 345 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 346 PetscFunctionReturn(0); 347 } 348 349 #undef __FUNCT__ 350 #define __FUNCT__ "DMDestroy" 351 /*@ 352 DMDestroy - Destroys a vector packer or DMDA. 353 354 Collective on DM 355 356 Input Parameter: 357 . dm - the DM object to destroy 358 359 Level: developer 360 361 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 362 363 @*/ 364 PetscErrorCode DMDestroy(DM *dm) 365 { 366 PetscInt i, cnt = 0, f; 367 DMNamedVecLink nlink,nnext; 368 PetscErrorCode ierr; 369 370 PetscFunctionBegin; 371 if (!*dm) PetscFunctionReturn(0); 372 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 373 374 /* count all the circular references of DM and its contained Vecs */ 375 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 376 if ((*dm)->localin[i]) cnt++; 377 if ((*dm)->globalin[i]) cnt++; 378 } 379 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 380 if ((*dm)->x) { 381 DM obj; 382 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 383 if (obj == *dm) cnt++; 384 } 385 for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++; 386 if ((*dm)->x) { 387 DM obj; 388 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 389 if (obj == *dm) cnt++; 390 } 391 392 if (--((PetscObject)(*dm))->refct - 
cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 393 /* 394 Need this test because the dm references the vectors that 395 reference the dm, so destroying the dm calls destroy on the 396 vectors that cause another destroy on the dm 397 */ 398 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 399 ((PetscObject) (*dm))->refct = 0; 400 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 401 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 402 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 403 } 404 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 405 nnext = nlink->next; 406 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 407 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 408 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 409 ierr = PetscFree(nlink);CHKERRQ(ierr); 410 } 411 (*dm)->namedglobal = NULL; 412 413 for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */ 414 nnext = nlink->next; 415 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 416 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 417 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 418 ierr = PetscFree(nlink);CHKERRQ(ierr); 419 } 420 (*dm)->namedlocal = NULL; 421 422 /* Destroy the list of hooks */ 423 { 424 DMCoarsenHookLink link,next; 425 for (link=(*dm)->coarsenhook; link; link=next) { 426 next = link->next; 427 ierr = PetscFree(link);CHKERRQ(ierr); 428 } 429 (*dm)->coarsenhook = NULL; 430 } 431 { 432 DMRefineHookLink link,next; 433 for (link=(*dm)->refinehook; link; link=next) { 434 next = link->next; 435 ierr = PetscFree(link);CHKERRQ(ierr); 436 } 437 (*dm)->refinehook = NULL; 438 } 439 { 440 DMSubDomainHookLink link,next; 441 for 
(link=(*dm)->subdomainhook; link; link=next) { 442 next = link->next; 443 ierr = PetscFree(link);CHKERRQ(ierr); 444 } 445 (*dm)->subdomainhook = NULL; 446 } 447 { 448 DMGlobalToLocalHookLink link,next; 449 for (link=(*dm)->gtolhook; link; link=next) { 450 next = link->next; 451 ierr = PetscFree(link);CHKERRQ(ierr); 452 } 453 (*dm)->gtolhook = NULL; 454 } 455 /* Destroy the work arrays */ 456 { 457 DMWorkLink link,next; 458 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 459 for (link=(*dm)->workin; link; link=next) { 460 next = link->next; 461 ierr = PetscFree(link->mem);CHKERRQ(ierr); 462 ierr = PetscFree(link);CHKERRQ(ierr); 463 } 464 (*dm)->workin = NULL; 465 } 466 467 ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr); 468 ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr); 469 ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr); 470 471 if ((*dm)->ctx && (*dm)->ctxdestroy) { 472 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 473 } 474 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 475 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 476 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 477 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 478 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 479 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 480 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 481 482 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 483 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 484 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 485 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 486 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 487 488 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 489 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 490 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 491 492 for (f = 0; f < (*dm)->numFields; ++f) { 493 ierr = 
PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 494 } 495 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 496 /* if memory was published with AMS then destroy it */ 497 ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr); 498 499 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 500 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 501 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 502 PetscFunctionReturn(0); 503 } 504 505 #undef __FUNCT__ 506 #define __FUNCT__ "DMSetUp" 507 /*@ 508 DMSetUp - sets up the data structures inside a DM object 509 510 Collective on DM 511 512 Input Parameter: 513 . dm - the DM object to setup 514 515 Level: developer 516 517 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 518 519 @*/ 520 PetscErrorCode DMSetUp(DM dm) 521 { 522 PetscErrorCode ierr; 523 524 PetscFunctionBegin; 525 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 526 if (dm->setupcalled) PetscFunctionReturn(0); 527 if (dm->ops->setup) { 528 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 529 } 530 dm->setupcalled = PETSC_TRUE; 531 PetscFunctionReturn(0); 532 } 533 534 #undef __FUNCT__ 535 #define __FUNCT__ "DMSetFromOptions" 536 /*@ 537 DMSetFromOptions - sets parameters in a DM from the options database 538 539 Collective on DM 540 541 Input Parameter: 542 . dm - the DM object to set options for 543 544 Options Database: 545 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 546 . -dm_vec_type <type> type of vector to create inside DM 547 . 
-dm_mat_type <type> type of matrix to create inside DM 548 - -dm_coloring_type <global or ghosted> 549 550 Level: developer 551 552 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 553 554 @*/ 555 PetscErrorCode DMSetFromOptions(DM dm) 556 { 557 char typeName[256] = MATAIJ; 558 PetscBool flg; 559 PetscErrorCode ierr; 560 561 PetscFunctionBegin; 562 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 563 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 564 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr); 565 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 566 if (flg) { 567 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 568 } 569 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 570 if (flg) { 571 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 572 } 573 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr); 574 if (dm->ops->setfromoptions) { 575 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 576 } 577 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 578 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 579 ierr = PetscOptionsEnd();CHKERRQ(ierr); 580 ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr); 581 PetscFunctionReturn(0); 582 } 583 584 #undef __FUNCT__ 585 #define __FUNCT__ "DMView" 586 /*@C 587 DMView - Views a vector packer or DMDA. 
588 589 Collective on DM 590 591 Input Parameter: 592 + dm - the DM object to view 593 - v - the viewer 594 595 Level: developer 596 597 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 598 599 @*/ 600 PetscErrorCode DMView(DM dm,PetscViewer v) 601 { 602 PetscErrorCode ierr; 603 PetscBool isbinary; 604 605 PetscFunctionBegin; 606 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 607 if (!v) { 608 ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr); 609 } 610 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 611 if (isbinary) { 612 PetscInt classid = DM_FILE_CLASSID; 613 char type[256]; 614 615 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 616 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 617 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 618 } 619 if (dm->ops->view) { 620 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 621 } 622 PetscFunctionReturn(0); 623 } 624 625 #undef __FUNCT__ 626 #define __FUNCT__ "DMCreateGlobalVector" 627 /*@ 628 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 629 630 Collective on DM 631 632 Input Parameter: 633 . dm - the DM object 634 635 Output Parameter: 636 . vec - the global vector 637 638 Level: beginner 639 640 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 641 642 @*/ 643 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 644 { 645 PetscErrorCode ierr; 646 647 PetscFunctionBegin; 648 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 649 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 650 PetscFunctionReturn(0); 651 } 652 653 #undef __FUNCT__ 654 #define __FUNCT__ "DMCreateLocalVector" 655 /*@ 656 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 657 658 Not Collective 659 660 Input Parameter: 661 . 
dm - the DM object 662 663 Output Parameter: 664 . vec - the local vector 665 666 Level: beginner 667 668 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 669 670 @*/ 671 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 672 { 673 PetscErrorCode ierr; 674 675 PetscFunctionBegin; 676 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 677 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 678 PetscFunctionReturn(0); 679 } 680 681 #undef __FUNCT__ 682 #define __FUNCT__ "DMGetLocalToGlobalMapping" 683 /*@ 684 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 685 686 Collective on DM 687 688 Input Parameter: 689 . dm - the DM that provides the mapping 690 691 Output Parameter: 692 . ltog - the mapping 693 694 Level: intermediate 695 696 Notes: 697 This mapping can then be used by VecSetLocalToGlobalMapping() or 698 MatSetLocalToGlobalMapping(). 699 700 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 701 @*/ 702 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 703 { 704 PetscErrorCode ierr; 705 706 PetscFunctionBegin; 707 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 708 PetscValidPointer(ltog,2); 709 if (!dm->ltogmap) { 710 PetscSection section, sectionGlobal; 711 712 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 713 if (section) { 714 PetscInt *ltog; 715 PetscInt pStart, pEnd, size, p, l; 716 717 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 718 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 719 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 720 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 721 for (p = pStart, l = 0; p < pEnd; ++p) { 722 PetscInt dof, off, c; 723 724 /* Should probably use constrained dofs */ 725 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 726 ierr = PetscSectionGetOffset(sectionGlobal, p, 
&off);CHKERRQ(ierr); 727 for (c = 0; c < dof; ++c, ++l) { 728 ltog[l] = off+c; 729 } 730 } 731 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 732 ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr); 733 } else { 734 if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 735 ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr); 736 } 737 } 738 *ltog = dm->ltogmap; 739 PetscFunctionReturn(0); 740 } 741 742 #undef __FUNCT__ 743 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 744 /*@ 745 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 746 747 Collective on DM 748 749 Input Parameter: 750 . da - the distributed array that provides the mapping 751 752 Output Parameter: 753 . ltog - the block mapping 754 755 Level: intermediate 756 757 Notes: 758 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 759 MatSetLocalToGlobalMappingBlock(). 
760 761 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 762 @*/ 763 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 764 { 765 PetscErrorCode ierr; 766 767 PetscFunctionBegin; 768 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 769 PetscValidPointer(ltog,2); 770 if (!dm->ltogmapb) { 771 PetscInt bs; 772 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 773 if (bs > 1) { 774 if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 775 ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 776 } else { 777 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 778 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 779 } 780 } 781 *ltog = dm->ltogmapb; 782 PetscFunctionReturn(0); 783 } 784 785 #undef __FUNCT__ 786 #define __FUNCT__ "DMGetBlockSize" 787 /*@ 788 DMGetBlockSize - Gets the inherent block size associated with a DM 789 790 Not Collective 791 792 Input Parameter: 793 . dm - the DM with block structure 794 795 Output Parameter: 796 . 
bs - the block size, 1 implies no exploitable block structure 797 798 Level: intermediate 799 800 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 801 @*/ 802 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 803 { 804 PetscFunctionBegin; 805 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 806 PetscValidPointer(bs,2); 807 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 808 *bs = dm->bs; 809 PetscFunctionReturn(0); 810 } 811 812 #undef __FUNCT__ 813 #define __FUNCT__ "DMCreateInterpolation" 814 /*@ 815 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 816 817 Collective on DM 818 819 Input Parameter: 820 + dm1 - the DM object 821 - dm2 - the second, finer DM object 822 823 Output Parameter: 824 + mat - the interpolation 825 - vec - the scaling (optional) 826 827 Level: developer 828 829 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 830 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 831 832 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 833 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
834 835 836 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 837 838 @*/ 839 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 840 { 841 PetscErrorCode ierr; 842 843 PetscFunctionBegin; 844 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 845 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 846 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 847 PetscFunctionReturn(0); 848 } 849 850 #undef __FUNCT__ 851 #define __FUNCT__ "DMCreateInjection" 852 /*@ 853 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 854 855 Collective on DM 856 857 Input Parameter: 858 + dm1 - the DM object 859 - dm2 - the second, finer DM object 860 861 Output Parameter: 862 . ctx - the injection 863 864 Level: developer 865 866 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 867 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 868 869 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 870 871 @*/ 872 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 873 { 874 PetscErrorCode ierr; 875 876 PetscFunctionBegin; 877 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 878 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 879 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 880 PetscFunctionReturn(0); 881 } 882 883 #undef __FUNCT__ 884 #define __FUNCT__ "DMCreateColoring" 885 /*@C 886 DMCreateColoring - Gets coloring for a DMDA or DMComposite 887 888 Collective on DM 889 890 Input Parameter: 891 + dm - the DM object 892 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 893 - matype - either MATAIJ or MATBAIJ 894 895 Output Parameter: 896 . 
coloring - the coloring 897 898 Level: developer 899 900 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 901 902 @*/ 903 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 904 { 905 PetscErrorCode ierr; 906 907 PetscFunctionBegin; 908 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 909 if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet"); 910 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 911 PetscFunctionReturn(0); 912 } 913 914 #undef __FUNCT__ 915 #define __FUNCT__ "DMCreateMatrix" 916 /*@C 917 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 918 919 Collective on DM 920 921 Input Parameter: 922 + dm - the DM object 923 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 924 any type which inherits from one of these (such as MATAIJ) 925 926 Output Parameter: 927 . mat - the empty Jacobian 928 929 Level: beginner 930 931 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 932 do not need to do it yourself. 933 934 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 935 the nonzero pattern call DMDASetMatPreallocateOnly() 936 937 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 938 internally by PETSc. 939 940 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 941 the indices for the global numbering for DMDAs which is complicated. 
942 943 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 944 945 @*/ 946 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 947 { 948 PetscErrorCode ierr; 949 950 PetscFunctionBegin; 951 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 952 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES) 953 ierr = MatInitializePackage();CHKERRQ(ierr); 954 #endif 955 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 956 PetscValidPointer(mat,3); 957 if (dm->mattype) { 958 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 959 } else { 960 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 961 } 962 PetscFunctionReturn(0); 963 } 964 965 #undef __FUNCT__ 966 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 967 /*@ 968 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 969 preallocated but the nonzero structure and zero values will not be set. 970 971 Logically Collective on DMDA 972 973 Input Parameter: 974 + dm - the DM 975 - only - PETSC_TRUE if only want preallocation 976 977 Level: developer 978 .seealso DMCreateMatrix() 979 @*/ 980 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 981 { 982 PetscFunctionBegin; 983 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 984 dm->prealloc_only = only; 985 PetscFunctionReturn(0); 986 } 987 988 #undef __FUNCT__ 989 #define __FUNCT__ "DMGetWorkArray" 990 /*@C 991 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 992 993 Not Collective 994 995 Input Parameters: 996 + dm - the DM object 997 . count - The minium size 998 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 999 1000 Output Parameter: 1001 . 
array - the work array 1002 1003 Level: developer 1004 1005 .seealso DMDestroy(), DMCreate() 1006 @*/ 1007 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1008 { 1009 PetscErrorCode ierr; 1010 DMWorkLink link; 1011 size_t size; 1012 1013 PetscFunctionBegin; 1014 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1015 PetscValidPointer(mem,4); 1016 if (dm->workin) { 1017 link = dm->workin; 1018 dm->workin = dm->workin->next; 1019 } else { 1020 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 1021 } 1022 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1023 if (size*count > link->bytes) { 1024 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1025 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1026 link->bytes = size*count; 1027 } 1028 link->next = dm->workout; 1029 dm->workout = link; 1030 *(void**)mem = link->mem; 1031 PetscFunctionReturn(0); 1032 } 1033 1034 #undef __FUNCT__ 1035 #define __FUNCT__ "DMRestoreWorkArray" 1036 /*@C 1037 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1038 1039 Not Collective 1040 1041 Input Parameters: 1042 + dm - the DM object 1043 . count - The minium size 1044 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1045 1046 Output Parameter: 1047 . 
array - the work array 1048 1049 Level: developer 1050 1051 .seealso DMDestroy(), DMCreate() 1052 @*/ 1053 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1054 { 1055 DMWorkLink *p,link; 1056 1057 PetscFunctionBegin; 1058 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1059 PetscValidPointer(mem,4); 1060 for (p=&dm->workout; (link=*p); p=&link->next) { 1061 if (link->mem == *(void**)mem) { 1062 *p = link->next; 1063 link->next = dm->workin; 1064 dm->workin = link; 1065 *(void**)mem = NULL; 1066 PetscFunctionReturn(0); 1067 } 1068 } 1069 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1070 PetscFunctionReturn(0); 1071 } 1072 1073 #undef __FUNCT__ 1074 #define __FUNCT__ "DMSetNullSpaceConstructor" 1075 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1076 { 1077 PetscFunctionBegin; 1078 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1079 if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1080 dm->nullspaceConstructors[field] = nullsp; 1081 PetscFunctionReturn(0); 1082 } 1083 1084 #undef __FUNCT__ 1085 #define __FUNCT__ "DMCreateFieldIS" 1086 /*@C 1087 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1088 1089 Not collective 1090 1091 Input Parameter: 1092 . dm - the DM object 1093 1094 Output Parameters: 1095 + numFields - The number of fields (or NULL if not requested) 1096 . fieldNames - The name for each field (or NULL if not requested) 1097 - fields - The global indices for each field (or NULL if not requested) 1098 1099 Level: intermediate 1100 1101 Notes: 1102 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1103 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1104 PetscFree(). 1105 1106 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1107 @*/ 1108 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1109 { 1110 PetscSection section, sectionGlobal; 1111 PetscErrorCode ierr; 1112 1113 PetscFunctionBegin; 1114 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1115 if (numFields) { 1116 PetscValidPointer(numFields,2); 1117 *numFields = 0; 1118 } 1119 if (fieldNames) { 1120 PetscValidPointer(fieldNames,3); 1121 *fieldNames = NULL; 1122 } 1123 if (fields) { 1124 PetscValidPointer(fields,4); 1125 *fields = NULL; 1126 } 1127 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1128 if (section) { 1129 PetscInt *fieldSizes, **fieldIndices; 1130 PetscInt nF, f, pStart, pEnd, p; 1131 1132 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1133 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1134 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr); 1135 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1136 for (f = 0; f < nF; ++f) { 1137 fieldSizes[f] = 0; 1138 } 1139 for (p = pStart; p < pEnd; ++p) { 1140 PetscInt gdof; 1141 1142 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1143 if (gdof > 0) { 1144 for (f = 0; f < nF; ++f) { 1145 PetscInt fdof, fcdof; 1146 1147 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1148 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1149 fieldSizes[f] += fdof-fcdof; 1150 } 1151 } 1152 } 1153 for (f = 0; f < nF; ++f) { 1154 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1155 fieldSizes[f] = 0; 1156 } 1157 for (p = pStart; p < pEnd; ++p) { 1158 PetscInt gdof, goff; 
1159 1160 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1161 if (gdof > 0) { 1162 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1163 for (f = 0; f < nF; ++f) { 1164 PetscInt fdof, fcdof, fc; 1165 1166 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1167 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1168 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1169 fieldIndices[f][fieldSizes[f]] = goff++; 1170 } 1171 } 1172 } 1173 } 1174 if (numFields) *numFields = nF; 1175 if (fieldNames) { 1176 ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr); 1177 for (f = 0; f < nF; ++f) { 1178 const char *fieldName; 1179 1180 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1181 ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr); 1182 } 1183 } 1184 if (fields) { 1185 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1186 for (f = 0; f < nF; ++f) { 1187 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1188 } 1189 } 1190 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1191 } else if (dm->ops->createfieldis) { 1192 ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr); 1193 } 1194 PetscFunctionReturn(0); 1195 } 1196 1197 1198 #undef __FUNCT__ 1199 #define __FUNCT__ "DMCreateFieldDecomposition" 1200 /*@C 1201 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1202 corresponding to different fields: each IS contains the global indices of the dofs of the 1203 corresponding field. The optional list of DMs define the DM for each subproblem. 1204 Generalizes DMCreateFieldIS(). 1205 1206 Not collective 1207 1208 Input Parameter: 1209 . 
dm - the DM object 1210 1211 Output Parameters: 1212 + len - The number of subproblems in the field decomposition (or NULL if not requested) 1213 . namelist - The name for each field (or NULL if not requested) 1214 . islist - The global indices for each field (or NULL if not requested) 1215 - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1216 1217 Level: intermediate 1218 1219 Notes: 1220 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1221 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1222 and all of the arrays should be freed with PetscFree(). 1223 1224 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1225 @*/ 1226 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1227 { 1228 PetscErrorCode ierr; 1229 1230 PetscFunctionBegin; 1231 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1232 if (len) { 1233 PetscValidPointer(len,2); 1234 *len = 0; 1235 } 1236 if (namelist) { 1237 PetscValidPointer(namelist,3); 1238 *namelist = 0; 1239 } 1240 if (islist) { 1241 PetscValidPointer(islist,4); 1242 *islist = 0; 1243 } 1244 if (dmlist) { 1245 PetscValidPointer(dmlist,5); 1246 *dmlist = 0; 1247 } 1248 /* 1249 Is it a good idea to apply the following check across all impls? 1250 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1251 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1252 */ 1253 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1254 if (!dm->ops->createfielddecomposition) { 1255 PetscSection section; 1256 PetscInt numFields, f; 1257 1258 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1259 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1260 if (section && numFields && dm->ops->createsubdm) { 1261 *len = numFields; 1262 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1263 for (f = 0; f < numFields; ++f) { 1264 const char *fieldName; 1265 1266 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1267 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1268 ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr); 1269 } 1270 } else { 1271 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1272 /* By default there are no DMs associated with subproblems. */ 1273 if (dmlist) *dmlist = NULL; 1274 } 1275 } else { 1276 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr); 1277 } 1278 PetscFunctionReturn(0); 1279 } 1280 1281 #undef __FUNCT__ 1282 #define __FUNCT__ "DMCreateSubDM" 1283 /*@C 1284 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1285 The fields are defined by DMCreateFieldIS(). 1286 1287 Not collective 1288 1289 Input Parameters: 1290 + dm - the DM object 1291 . numFields - number of fields in this subproblem 1292 - len - The number of subproblems in the decomposition (or NULL if not requested) 1293 1294 Output Parameters: 1295 . 
is - The global indices for the subproblem 1296 - dm - The DM for the subproblem 1297 1298 Level: intermediate 1299 1300 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1301 @*/ 1302 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1303 { 1304 PetscErrorCode ierr; 1305 1306 PetscFunctionBegin; 1307 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1308 PetscValidPointer(fields,3); 1309 if (is) PetscValidPointer(is,4); 1310 if (subdm) PetscValidPointer(subdm,5); 1311 if (dm->ops->createsubdm) { 1312 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr); 1313 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1314 PetscFunctionReturn(0); 1315 } 1316 1317 1318 #undef __FUNCT__ 1319 #define __FUNCT__ "DMCreateDomainDecomposition" 1320 /*@C 1321 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1322 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1323 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1324 define a nonoverlapping covering, while outer subdomains can overlap. 1325 The optional list of DMs define the DM for each subproblem. 1326 1327 Not collective 1328 1329 Input Parameter: 1330 . dm - the DM object 1331 1332 Output Parameters: 1333 + len - The number of subproblems in the domain decomposition (or NULL if not requested) 1334 . namelist - The name for each subdomain (or NULL if not requested) 1335 . innerislist - The global indices for each inner subdomain (or NULL, if not requested) 1336 . 
outerislist - The global indices for each outer subdomain (or NULL, if not requested) 1337 - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined) 1338 1339 Level: intermediate 1340 1341 Notes: 1342 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1343 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1344 and all of the arrays should be freed with PetscFree(). 1345 1346 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1347 @*/ 1348 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1349 { 1350 PetscErrorCode ierr; 1351 DMSubDomainHookLink link; 1352 PetscInt i,l; 1353 1354 PetscFunctionBegin; 1355 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1356 if (len) {PetscValidPointer(len,2); *len = 0;} 1357 if (namelist) {PetscValidPointer(namelist,3); *namelist = NULL;} 1358 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = NULL;} 1359 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = NULL;} 1360 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = NULL;} 1361 /* 1362 Is it a good idea to apply the following check across all impls? 1363 Perhaps some impls can have a well-defined decomposition before DMSetUp? 1364 This, however, follows the general principle that accessors are not well-behaved until the object is set up. 
1365 */ 1366 if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp"); 1367 if (dm->ops->createdomaindecomposition) { 1368 ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr); 1369 /* copy subdomain hooks and context over to the subdomain DMs */ 1370 if (dmlist) { 1371 for (i = 0; i < l; i++) { 1372 for (link=dm->subdomainhook; link; link=link->next) { 1373 if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);} 1374 } 1375 (*dmlist)[i]->ctx = dm->ctx; 1376 } 1377 } 1378 if (len) *len = l; 1379 } 1380 PetscFunctionReturn(0); 1381 } 1382 1383 1384 #undef __FUNCT__ 1385 #define __FUNCT__ "DMCreateDomainDecompositionScatters" 1386 /*@C 1387 DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector 1388 1389 Not collective 1390 1391 Input Parameters: 1392 + dm - the DM object 1393 . n - the number of subdomain scatters 1394 - subdms - the local subdomains 1395 1396 Output Parameters: 1397 + n - the number of scatters returned 1398 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain 1399 . oscat - scatter from global vector to overlapping global vector entries on subdomain 1400 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts) 1401 1402 Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution 1403 of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets 1404 of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of 1405 solution and residual data. 
1406 1407 Level: developer 1408 1409 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1410 @*/ 1411 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat) 1412 { 1413 PetscErrorCode ierr; 1414 1415 PetscFunctionBegin; 1416 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1417 PetscValidPointer(subdms,3); 1418 if (dm->ops->createddscatters) { 1419 ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr); 1420 } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); 1421 PetscFunctionReturn(0); 1422 } 1423 1424 #undef __FUNCT__ 1425 #define __FUNCT__ "DMRefine" 1426 /*@ 1427 DMRefine - Refines a DM object 1428 1429 Collective on DM 1430 1431 Input Parameter: 1432 + dm - the DM object 1433 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1434 1435 Output Parameter: 1436 . 
dmf - the refined DM, or NULL 1437 1438 Note: If no refinement was done, the return value is NULL 1439 1440 Level: developer 1441 1442 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1443 @*/ 1444 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1445 { 1446 PetscErrorCode ierr; 1447 DMRefineHookLink link; 1448 1449 PetscFunctionBegin; 1450 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1451 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1452 if (*dmf) { 1453 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1454 1455 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1456 1457 (*dmf)->ctx = dm->ctx; 1458 (*dmf)->leveldown = dm->leveldown; 1459 (*dmf)->levelup = dm->levelup + 1; 1460 1461 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1462 for (link=dm->refinehook; link; link=link->next) { 1463 if (link->refinehook) { 1464 ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr); 1465 } 1466 } 1467 } 1468 PetscFunctionReturn(0); 1469 } 1470 1471 #undef __FUNCT__ 1472 #define __FUNCT__ "DMRefineHookAdd" 1473 /*@C 1474 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1475 1476 Logically Collective 1477 1478 Input Arguments: 1479 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1480 . refinehook - function to run when setting up a coarser level 1481 . interphook - function to run to update data on finer levels (once per SNESSolve()) 1482 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1483 1484 Calling sequence of refinehook: 1485 $ refinehook(DM coarse,DM fine,void *ctx); 1486 1487 + coarse - coarse level DM 1488 . 
fine - fine level DM to interpolate problem to 1489 - ctx - optional user-defined function context 1490 1491 Calling sequence for interphook: 1492 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1493 1494 + coarse - coarse level DM 1495 . interp - matrix interpolating a coarse-level solution to the finer grid 1496 . fine - fine level DM to update 1497 - ctx - optional user-defined function context 1498 1499 Level: advanced 1500 1501 Notes: 1502 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1503 1504 If this function is called multiple times, the hooks will be run in the order they are added. 1505 1506 This function is currently not available from Fortran. 1507 1508 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1509 @*/ 1510 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1511 { 1512 PetscErrorCode ierr; 1513 DMRefineHookLink link,*p; 1514 1515 PetscFunctionBegin; 1516 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1517 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1518 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1519 link->refinehook = refinehook; 1520 link->interphook = interphook; 1521 link->ctx = ctx; 1522 link->next = NULL; 1523 *p = link; 1524 PetscFunctionReturn(0); 1525 } 1526 1527 #undef __FUNCT__ 1528 #define __FUNCT__ "DMInterpolate" 1529 /*@ 1530 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1531 1532 Collective if any hooks are 1533 1534 Input Arguments: 1535 + coarse - coarser DM to use as a base 1536 . 
restrct - interpolation matrix, apply using MatInterpolate() 1537 - fine - finer DM to update 1538 1539 Level: developer 1540 1541 .seealso: DMRefineHookAdd(), MatInterpolate() 1542 @*/ 1543 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1544 { 1545 PetscErrorCode ierr; 1546 DMRefineHookLink link; 1547 1548 PetscFunctionBegin; 1549 for (link=fine->refinehook; link; link=link->next) { 1550 if (link->interphook) { 1551 ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr); 1552 } 1553 } 1554 PetscFunctionReturn(0); 1555 } 1556 1557 #undef __FUNCT__ 1558 #define __FUNCT__ "DMGetRefineLevel" 1559 /*@ 1560 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1561 1562 Not Collective 1563 1564 Input Parameter: 1565 . dm - the DM object 1566 1567 Output Parameter: 1568 . level - number of refinements 1569 1570 Level: developer 1571 1572 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1573 1574 @*/ 1575 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1576 { 1577 PetscFunctionBegin; 1578 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1579 *level = dm->levelup; 1580 PetscFunctionReturn(0); 1581 } 1582 1583 #undef __FUNCT__ 1584 #define __FUNCT__ "DMGlobalToLocalHookAdd" 1585 /*@C 1586 DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called 1587 1588 Logically Collective 1589 1590 Input Arguments: 1591 + dm - the DM 1592 . beginhook - function to run at the beginning of DMGlobalToLocalBegin() 1593 . endhook - function to run after DMGlobalToLocalEnd() has completed 1594 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 1595 1596 Calling sequence for beginhook: 1597 $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1598 1599 + dm - global DM 1600 . g - global vector 1601 . mode - mode 1602 . 
l - local vector 1603 - ctx - optional user-defined function context 1604 1605 1606 Calling sequence for endhook: 1607 $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1608 1609 + global - global DM 1610 - ctx - optional user-defined function context 1611 1612 Level: advanced 1613 1614 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1615 @*/ 1616 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx) 1617 { 1618 PetscErrorCode ierr; 1619 DMGlobalToLocalHookLink link,*p; 1620 1621 PetscFunctionBegin; 1622 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1623 for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1624 ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr); 1625 link->beginhook = beginhook; 1626 link->endhook = endhook; 1627 link->ctx = ctx; 1628 link->next = NULL; 1629 *p = link; 1630 PetscFunctionReturn(0); 1631 } 1632 1633 #undef __FUNCT__ 1634 #define __FUNCT__ "DMGlobalToLocalBegin" 1635 /*@ 1636 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1637 1638 Neighbor-wise Collective on DM 1639 1640 Input Parameters: 1641 + dm - the DM object 1642 . g - the global vector 1643 . 
mode - INSERT_VALUES or ADD_VALUES 1644 - l - the local vector 1645 1646 1647 Level: beginner 1648 1649 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1650 1651 @*/ 1652 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1653 { 1654 PetscSF sf; 1655 PetscErrorCode ierr; 1656 DMGlobalToLocalHookLink link; 1657 1658 PetscFunctionBegin; 1659 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1660 for (link=dm->gtolhook; link; link=link->next) { 1661 if (link->beginhook) { 1662 ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr); 1663 } 1664 } 1665 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1666 if (sf) { 1667 PetscScalar *lArray, *gArray; 1668 1669 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1670 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1671 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1672 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1673 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1674 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1675 } else { 1676 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1677 } 1678 PetscFunctionReturn(0); 1679 } 1680 1681 #undef __FUNCT__ 1682 #define __FUNCT__ "DMGlobalToLocalEnd" 1683 /*@ 1684 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1685 1686 Neighbor-wise Collective on DM 1687 1688 Input Parameters: 1689 + dm - the DM object 1690 . g - the global vector 1691 . 
mode - INSERT_VALUES or ADD_VALUES 1692 - l - the local vector 1693 1694 1695 Level: beginner 1696 1697 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1698 1699 @*/ 1700 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1701 { 1702 PetscSF sf; 1703 PetscErrorCode ierr; 1704 PetscScalar *lArray, *gArray; 1705 DMGlobalToLocalHookLink link; 1706 1707 PetscFunctionBegin; 1708 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1709 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1710 if (sf) { 1711 if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1712 1713 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1714 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1715 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1716 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1717 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1718 } else { 1719 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1720 } 1721 for (link=dm->gtolhook; link; link=link->next) { 1722 if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);} 1723 } 1724 PetscFunctionReturn(0); 1725 } 1726 1727 #undef __FUNCT__ 1728 #define __FUNCT__ "DMLocalToGlobalBegin" 1729 /*@ 1730 DMLocalToGlobalBegin - updates global vectors from local vectors 1731 1732 Neighbor-wise Collective on DM 1733 1734 Input Parameters: 1735 + dm - the DM object 1736 . l - the local vector 1737 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1738 base point. 1739 - - the global vector 1740 1741 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1742 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1743 global array to the final global array with VecAXPY(). 1744 1745 Level: beginner 1746 1747 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1748 1749 @*/ 1750 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1751 { 1752 PetscSF sf; 1753 PetscErrorCode ierr; 1754 1755 PetscFunctionBegin; 1756 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1757 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1758 if (sf) { 1759 MPI_Op op; 1760 PetscScalar *lArray, *gArray; 1761 1762 switch (mode) { 1763 case INSERT_VALUES: 1764 case INSERT_ALL_VALUES: 1765 op = MPIU_REPLACE; break; 1766 case ADD_VALUES: 1767 case ADD_ALL_VALUES: 1768 op = MPI_SUM; break; 1769 default: 1770 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1771 } 1772 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1773 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1774 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1775 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1776 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1777 } else { 1778 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1779 } 1780 PetscFunctionReturn(0); 1781 } 1782 1783 #undef __FUNCT__ 1784 #define __FUNCT__ "DMLocalToGlobalEnd" 1785 /*@ 1786 DMLocalToGlobalEnd - updates global vectors from local vectors 1787 1788 Neighbor-wise Collective on DM 1789 1790 Input Parameters: 1791 + dm - the DM object 1792 . l - the local vector 1793 . 
mode - INSERT_VALUES or ADD_VALUES 1794 - g - the global vector 1795 1796 1797 Level: beginner 1798 1799 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1800 1801 @*/ 1802 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1803 { 1804 PetscSF sf; 1805 PetscErrorCode ierr; 1806 1807 PetscFunctionBegin; 1808 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1809 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1810 if (sf) { 1811 MPI_Op op; 1812 PetscScalar *lArray, *gArray; 1813 1814 switch (mode) { 1815 case INSERT_VALUES: 1816 case INSERT_ALL_VALUES: 1817 op = MPIU_REPLACE; break; 1818 case ADD_VALUES: 1819 case ADD_ALL_VALUES: 1820 op = MPI_SUM; break; 1821 default: 1822 SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1823 } 1824 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1825 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1826 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1827 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1828 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1829 } else { 1830 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr); 1831 } 1832 PetscFunctionReturn(0); 1833 } 1834 1835 #undef __FUNCT__ 1836 #define __FUNCT__ "DMLocalToLocalBegin" 1837 /*@ 1838 DMLocalToLocalBegin - Maps from a local vector (including ghost points 1839 that contain irrelevant values) to another local vector where the ghost 1840 points in the second are set correctly. Must be followed by DMLocalToLocalEnd(). 1841 1842 Neighbor-wise Collective on DM and Vec 1843 1844 Input Parameters: 1845 + dm - the DM object 1846 . g - the original local vector 1847 - mode - one of INSERT_VALUES or ADD_VALUES 1848 1849 Output Parameter: 1850 . 
l - the local vector with correct ghost values 1851 1852 Level: intermediate 1853 1854 Notes: 1855 The local vectors used here need not be the same as those 1856 obtained from DMCreateLocalVector(), BUT they 1857 must have the same parallel data layout; they could, for example, be 1858 obtained with VecDuplicate() from the DM originating vectors. 1859 1860 .keywords: DM, local-to-local, begin 1861 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1862 1863 @*/ 1864 PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1865 { 1866 PetscErrorCode ierr; 1867 1868 PetscFunctionBegin; 1869 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1870 ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1871 PetscFunctionReturn(0); 1872 } 1873 1874 #undef __FUNCT__ 1875 #define __FUNCT__ "DMLocalToLocalEnd" 1876 /*@ 1877 DMLocalToLocalEnd - Maps from a local vector (including ghost points 1878 that contain irrelevant values) to another local vector where the ghost 1879 points in the second are set correctly. Must be preceded by DMLocalToLocalBegin(). 1880 1881 Neighbor-wise Collective on DM and Vec 1882 1883 Input Parameters: 1884 + da - the DM object 1885 . g - the original local vector 1886 - mode - one of INSERT_VALUES or ADD_VALUES 1887 1888 Output Parameter: 1889 . l - the local vector with correct ghost values 1890 1891 Level: intermediate 1892 1893 Notes: 1894 The local vectors used here need not be the same as those 1895 obtained from DMCreateLocalVector(), BUT they 1896 must have the same parallel data layout; they could, for example, be 1897 obtained with VecDuplicate() from the DM originating vectors. 
1898 1899 .keywords: DM, local-to-local, end 1900 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1901 1902 @*/ 1903 PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1904 { 1905 PetscErrorCode ierr; 1906 1907 PetscFunctionBegin; 1908 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1909 ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1910 PetscFunctionReturn(0); 1911 } 1912 1913 1914 #undef __FUNCT__ 1915 #define __FUNCT__ "DMCoarsen" 1916 /*@ 1917 DMCoarsen - Coarsens a DM object 1918 1919 Collective on DM 1920 1921 Input Parameter: 1922 + dm - the DM object 1923 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1924 1925 Output Parameter: 1926 . dmc - the coarsened DM 1927 1928 Level: developer 1929 1930 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1931 1932 @*/ 1933 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1934 { 1935 PetscErrorCode ierr; 1936 DMCoarsenHookLink link; 1937 1938 PetscFunctionBegin; 1939 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1940 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1941 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1942 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1943 (*dmc)->ctx = dm->ctx; 1944 (*dmc)->levelup = dm->levelup; 1945 (*dmc)->leveldown = dm->leveldown + 1; 1946 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1947 for (link=dm->coarsenhook; link; link=link->next) { 1948 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1949 } 1950 PetscFunctionReturn(0); 1951 } 1952 1953 #undef __FUNCT__ 1954 #define __FUNCT__ "DMCoarsenHookAdd" 1955 /*@C 1956 DMCoarsenHookAdd - adds a callback to be 
run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  fine - the fine-level DM on which to run a hook when restricting to a coarser level
.  coarsenhook - function to run when setting up a coarser level
.  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)

   Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+  fine - fine level DM
.  coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are used

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.
inject - injection matrix, also use MatRestrict() 2025 - coarse - coarer DM to update 2026 2027 Level: developer 2028 2029 .seealso: DMCoarsenHookAdd(), MatRestrict() 2030 @*/ 2031 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 2032 { 2033 PetscErrorCode ierr; 2034 DMCoarsenHookLink link; 2035 2036 PetscFunctionBegin; 2037 for (link=fine->coarsenhook; link; link=link->next) { 2038 if (link->restricthook) { 2039 ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr); 2040 } 2041 } 2042 PetscFunctionReturn(0); 2043 } 2044 2045 #undef __FUNCT__ 2046 #define __FUNCT__ "DMSubDomainHookAdd" 2047 /*@C 2048 DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid 2049 2050 Logically Collective 2051 2052 Input Arguments: 2053 + global - global DM 2054 . ddhook - function to run to pass data to the decomposition DM upon its creation 2055 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 2056 - ctx - [optional] user-defined context for provide data for the hooks (may be NULL) 2057 2058 2059 Calling sequence for ddhook: 2060 $ ddhook(DM global,DM block,void *ctx) 2061 2062 + global - global DM 2063 . block - block DM 2064 - ctx - optional user-defined function context 2065 2066 Calling sequence for restricthook: 2067 $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx) 2068 2069 + global - global DM 2070 . out - scatter to the outer (with ghost and overlap points) block vector 2071 . in - scatter to block vector values only owned locally 2072 . block - block DM 2073 - ctx - optional user-defined function context 2074 2075 Level: advanced 2076 2077 Notes: 2078 This function is only needed if auxiliary data needs to be set up on subdomain DMs. 2079 2080 If this function is called multiple times, the hooks will be run in the order they are added. 
2081 2082 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 2083 extract the global information from its context (instead of from the SNES). 2084 2085 This function is currently not available from Fortran. 2086 2087 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 2088 @*/ 2089 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 2090 { 2091 PetscErrorCode ierr; 2092 DMSubDomainHookLink link,*p; 2093 2094 PetscFunctionBegin; 2095 PetscValidHeaderSpecific(global,DM_CLASSID,1); 2096 for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 2097 ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr); 2098 link->restricthook = restricthook; 2099 link->ddhook = ddhook; 2100 link->ctx = ctx; 2101 link->next = NULL; 2102 *p = link; 2103 PetscFunctionReturn(0); 2104 } 2105 2106 #undef __FUNCT__ 2107 #define __FUNCT__ "DMSubDomainRestrict" 2108 /*@ 2109 DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd() 2110 2111 Collective if any hooks are 2112 2113 Input Arguments: 2114 + fine - finer DM to use as a base 2115 . oscatter - scatter from domain global vector filling subdomain global vector with overlap 2116 . 
gscatter - scatter from domain global vector filling subdomain local vector with ghosts 2117 - coarse - coarer DM to update 2118 2119 Level: developer 2120 2121 .seealso: DMCoarsenHookAdd(), MatRestrict() 2122 @*/ 2123 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm) 2124 { 2125 PetscErrorCode ierr; 2126 DMSubDomainHookLink link; 2127 2128 PetscFunctionBegin; 2129 for (link=global->subdomainhook; link; link=link->next) { 2130 if (link->restricthook) { 2131 ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr); 2132 } 2133 } 2134 PetscFunctionReturn(0); 2135 } 2136 2137 #undef __FUNCT__ 2138 #define __FUNCT__ "DMGetCoarsenLevel" 2139 /*@ 2140 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 2141 2142 Not Collective 2143 2144 Input Parameter: 2145 . dm - the DM object 2146 2147 Output Parameter: 2148 . level - number of coarsenings 2149 2150 Level: developer 2151 2152 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2153 2154 @*/ 2155 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2156 { 2157 PetscFunctionBegin; 2158 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2159 *level = dm->leveldown; 2160 PetscFunctionReturn(0); 2161 } 2162 2163 2164 2165 #undef __FUNCT__ 2166 #define __FUNCT__ "DMRefineHierarchy" 2167 /*@C 2168 DMRefineHierarchy - Refines a DM object, all levels at once 2169 2170 Collective on DM 2171 2172 Input Parameter: 2173 + dm - the DM object 2174 - nlevels - the number of levels of refinement 2175 2176 Output Parameter: 2177 . 
dmf - the refined DM hierarchy 2178 2179 Level: developer 2180 2181 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2182 2183 @*/ 2184 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2185 { 2186 PetscErrorCode ierr; 2187 2188 PetscFunctionBegin; 2189 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2190 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2191 if (nlevels == 0) PetscFunctionReturn(0); 2192 if (dm->ops->refinehierarchy) { 2193 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2194 } else if (dm->ops->refine) { 2195 PetscInt i; 2196 2197 ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr); 2198 for (i=1; i<nlevels; i++) { 2199 ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr); 2200 } 2201 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2202 PetscFunctionReturn(0); 2203 } 2204 2205 #undef __FUNCT__ 2206 #define __FUNCT__ "DMCoarsenHierarchy" 2207 /*@C 2208 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2209 2210 Collective on DM 2211 2212 Input Parameter: 2213 + dm - the DM object 2214 - nlevels - the number of levels of coarsening 2215 2216 Output Parameter: 2217 . 
dmc - the coarsened DM hierarchy 2218 2219 Level: developer 2220 2221 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2222 2223 @*/ 2224 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2225 { 2226 PetscErrorCode ierr; 2227 2228 PetscFunctionBegin; 2229 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2230 if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2231 if (nlevels == 0) PetscFunctionReturn(0); 2232 PetscValidPointer(dmc,3); 2233 if (dm->ops->coarsenhierarchy) { 2234 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2235 } else if (dm->ops->coarsen) { 2236 PetscInt i; 2237 2238 ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr); 2239 for (i=1; i<nlevels; i++) { 2240 ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr); 2241 } 2242 } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2243 PetscFunctionReturn(0); 2244 } 2245 2246 #undef __FUNCT__ 2247 #define __FUNCT__ "DMCreateAggregates" 2248 /*@ 2249 DMCreateAggregates - Gets the aggregates that map between 2250 grids associated with two DMs. 2251 2252 Collective on DM 2253 2254 Input Parameters: 2255 + dmc - the coarse grid DM 2256 - dmf - the fine grid DM 2257 2258 Output Parameters: 2259 . 
rest - the restriction matrix (transpose of the projection matrix) 2260 2261 Level: intermediate 2262 2263 .keywords: interpolation, restriction, multigrid 2264 2265 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2266 @*/ 2267 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2268 { 2269 PetscErrorCode ierr; 2270 2271 PetscFunctionBegin; 2272 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2273 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2274 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2275 PetscFunctionReturn(0); 2276 } 2277 2278 #undef __FUNCT__ 2279 #define __FUNCT__ "DMSetApplicationContextDestroy" 2280 /*@C 2281 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2282 2283 Not Collective 2284 2285 Input Parameters: 2286 + dm - the DM object 2287 - destroy - the destroy function 2288 2289 Level: intermediate 2290 2291 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2292 2293 @*/ 2294 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2295 { 2296 PetscFunctionBegin; 2297 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2298 dm->ctxdestroy = destroy; 2299 PetscFunctionReturn(0); 2300 } 2301 2302 #undef __FUNCT__ 2303 #define __FUNCT__ "DMSetApplicationContext" 2304 /*@ 2305 DMSetApplicationContext - Set a user context into a DM object 2306 2307 Not Collective 2308 2309 Input Parameters: 2310 + dm - the DM object 2311 - ctx - the user context 2312 2313 Level: intermediate 2314 2315 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2316 2317 @*/ 2318 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2319 { 2320 PetscFunctionBegin; 2321 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2322 dm->ctx = ctx; 2323 PetscFunctionReturn(0); 2324 } 2325 
2326 #undef __FUNCT__ 2327 #define __FUNCT__ "DMGetApplicationContext" 2328 /*@ 2329 DMGetApplicationContext - Gets a user context from a DM object 2330 2331 Not Collective 2332 2333 Input Parameter: 2334 . dm - the DM object 2335 2336 Output Parameter: 2337 . ctx - the user context 2338 2339 Level: intermediate 2340 2341 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2342 2343 @*/ 2344 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2345 { 2346 PetscFunctionBegin; 2347 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2348 *(void**)ctx = dm->ctx; 2349 PetscFunctionReturn(0); 2350 } 2351 2352 #undef __FUNCT__ 2353 #define __FUNCT__ "DMSetVariableBounds" 2354 /*@C 2355 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 2356 2357 Logically Collective on DM 2358 2359 Input Parameter: 2360 + dm - the DM object 2361 - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set) 2362 2363 Level: intermediate 2364 2365 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), 2366 DMSetJacobian() 2367 2368 @*/ 2369 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2370 { 2371 PetscFunctionBegin; 2372 dm->ops->computevariablebounds = f; 2373 PetscFunctionReturn(0); 2374 } 2375 2376 #undef __FUNCT__ 2377 #define __FUNCT__ "DMHasVariableBounds" 2378 /*@ 2379 DMHasVariableBounds - does the DM object have a variable bounds function? 2380 2381 Not Collective 2382 2383 Input Parameter: 2384 . dm - the DM object to destroy 2385 2386 Output Parameter: 2387 . 
flg - PETSC_TRUE if the variable bounds function exists 2388 2389 Level: developer 2390 2391 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2392 2393 @*/ 2394 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2395 { 2396 PetscFunctionBegin; 2397 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2398 PetscFunctionReturn(0); 2399 } 2400 2401 #undef __FUNCT__ 2402 #define __FUNCT__ "DMComputeVariableBounds" 2403 /*@C 2404 DMComputeVariableBounds - compute variable bounds used by SNESVI. 2405 2406 Logically Collective on DM 2407 2408 Input Parameters: 2409 + dm - the DM object to destroy 2410 - x - current solution at which the bounds are computed 2411 2412 Output parameters: 2413 + xl - lower bound 2414 - xu - upper bound 2415 2416 Level: intermediate 2417 2418 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2419 2420 @*/ 2421 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2422 { 2423 PetscErrorCode ierr; 2424 2425 PetscFunctionBegin; 2426 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2427 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2428 if (dm->ops->computevariablebounds) { 2429 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr); 2430 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2431 PetscFunctionReturn(0); 2432 } 2433 2434 #undef __FUNCT__ 2435 #define __FUNCT__ "DMHasColoring" 2436 /*@ 2437 DMHasColoring - does the DM object have a method of providing a coloring? 2438 2439 Not Collective 2440 2441 Input Parameter: 2442 . dm - the DM object 2443 2444 Output Parameter: 2445 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 
2446 2447 Level: developer 2448 2449 .seealso DMHasFunction(), DMCreateColoring() 2450 2451 @*/ 2452 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2453 { 2454 PetscFunctionBegin; 2455 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2456 PetscFunctionReturn(0); 2457 } 2458 2459 #undef __FUNCT__ 2460 #define __FUNCT__ "DMSetVec" 2461 /*@C 2462 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2463 2464 Collective on DM 2465 2466 Input Parameter: 2467 + dm - the DM object 2468 - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems. 2469 2470 Level: developer 2471 2472 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2473 2474 @*/ 2475 PetscErrorCode DMSetVec(DM dm,Vec x) 2476 { 2477 PetscErrorCode ierr; 2478 2479 PetscFunctionBegin; 2480 if (x) { 2481 if (!dm->x) { 2482 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2483 } 2484 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2485 } else if (dm->x) { 2486 ierr = VecDestroy(&dm->x);CHKERRQ(ierr); 2487 } 2488 PetscFunctionReturn(0); 2489 } 2490 2491 PetscFunctionList DMList = NULL; 2492 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2493 2494 #undef __FUNCT__ 2495 #define __FUNCT__ "DMSetType" 2496 /*@C 2497 DMSetType - Builds a DM, for a particular DM implementation. 2498 2499 Collective on DM 2500 2501 Input Parameters: 2502 + dm - The DM object 2503 - method - The name of the DM type 2504 2505 Options Database Key: 2506 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2507 2508 Notes: 2509 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2510 2511 Level: intermediate 2512 2513 .keywords: DM, set, type 2514 .seealso: DMGetType(), DMCreate() 2515 @*/ 2516 PetscErrorCode DMSetType(DM dm, DMType method) 2517 { 2518 PetscErrorCode (*r)(DM); 2519 PetscBool match; 2520 PetscErrorCode ierr; 2521 2522 PetscFunctionBegin; 2523 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2524 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2525 if (match) PetscFunctionReturn(0); 2526 2527 if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);} 2528 ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr); 2529 if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2530 2531 if (dm->ops->destroy) { 2532 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2533 dm->ops->destroy = NULL; 2534 } 2535 ierr = (*r)(dm);CHKERRQ(ierr); 2536 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2537 PetscFunctionReturn(0); 2538 } 2539 2540 #undef __FUNCT__ 2541 #define __FUNCT__ "DMGetType" 2542 /*@C 2543 DMGetType - Gets the DM type name (as a string) from the DM. 2544 2545 Not Collective 2546 2547 Input Parameter: 2548 . dm - The DM 2549 2550 Output Parameter: 2551 . type - The DM type name 2552 2553 Level: intermediate 2554 2555 .keywords: DM, get, type, name 2556 .seealso: DMSetType(), DMCreate() 2557 @*/ 2558 PetscErrorCode DMGetType(DM dm, DMType *type) 2559 { 2560 PetscErrorCode ierr; 2561 2562 PetscFunctionBegin; 2563 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2564 PetscValidCharPointer(type,2); 2565 if (!DMRegisterAllCalled) { 2566 ierr = DMRegisterAll();CHKERRQ(ierr); 2567 } 2568 *type = ((PetscObject)dm)->type_name; 2569 PetscFunctionReturn(0); 2570 } 2571 2572 #undef __FUNCT__ 2573 #define __FUNCT__ "DMConvert" 2574 /*@C 2575 DMConvert - Converts a DM to another DM, either of the same or different type. 
  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed here but never consulted below --
     confirm whether a same-type fast path was intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class
          (composed functions named "DMConvert_<from>_<to>_C") */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class:
          build a temporary DM of the target type and query it for the same composed function */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. (none exists: no conversion is possible) */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -da_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().
  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

  Level: intermediate

  Notes:
  The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* NOTE(review): the manual page above mentions HDF5 viewers, but only binary
     viewers are accepted here -- confirm intended support */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File layout: classid, then type name, then implementation-specific data read by ops->load */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/******************************** FEM Support **********************************/

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellVector"
/* Debugging helper: print the element vector x (length len) for cell c to stdout (sequential only) */
PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < len; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMPrintCellMatrix"
/* Debugging helper: print the rows x cols element matrix A for cell c to stdout (sequential only) */
PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
{
  PetscInt       f, g;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
  for (f = 0; f < rows; ++f) {
    ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr);
    for (g = 0; g < cols; ++g) {
      ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
    }
    ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so that passing the currently-held section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field names onto the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section (and the matching value PetscLayout) from the local section and point SF */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
  Input Parameters:
+ dm - The DM
- section - The PetscSection, or NULL

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so that passing the currently-held section is safe.
     NOTE(review): section may be NULL here -- confirm PetscObjectReference() accepts NULL in this PETSc version */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative root count means the SF graph has never been set; lazily build it from the sections if possible */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No local section: there is nothing to build the SF from */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): sf is stored without taking a reference -- ownership appears to transfer
     to the DM; confirm callers do not also destroy it */
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
.
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the locally owned constrained-storage dofs so that a global
     offset can be mapped to its owning rank via the ranges array below. */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a point whose dofs are owned
     elsewhere, with true count -(gdof+1); constrained dofs are excluded either way. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill the local leaf indices and their (rank, index) remote roots */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* The global size must match the local size either with or without constraints */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Local leaf indices: this point's local dofs with constrained ones skipped */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: negative goff encodes the true global offset -(goff+1);
         find the owning rank by searching the layout ranges. */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact range start: convert to the owning interval */
        remote[l].rank = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* Ownership of local and remote transfers to the SF (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local,
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3092 PetscFunctionReturn(0); 3093 } 3094 3095 #undef __FUNCT__ 3096 #define __FUNCT__ "DMGetPointSF" 3097 /*@ 3098 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3099 3100 Input Parameter: 3101 . dm - The DM 3102 3103 Output Parameter: 3104 . sf - The PetscSF 3105 3106 Level: intermediate 3107 3108 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3109 3110 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3111 @*/ 3112 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) 3113 { 3114 PetscFunctionBegin; 3115 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3116 PetscValidPointer(sf, 2); 3117 *sf = dm->sf; 3118 PetscFunctionReturn(0); 3119 } 3120 3121 #undef __FUNCT__ 3122 #define __FUNCT__ "DMSetPointSF" 3123 /*@ 3124 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3125 3126 Input Parameters: 3127 + dm - The DM 3128 - sf - The PetscSF 3129 3130 Level: intermediate 3131 3132 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3133 @*/ 3134 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) 3135 { 3136 PetscErrorCode ierr; 3137 3138 PetscFunctionBegin; 3139 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3140 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3141 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3142 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3143 dm->sf = sf; 3144 PetscFunctionReturn(0); 3145 } 3146 3147 #undef __FUNCT__ 3148 #define __FUNCT__ "DMGetNumFields" 3149 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3150 { 3151 PetscFunctionBegin; 3152 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3153 PetscValidPointer(numFields, 2); 3154 *numFields = dm->numFields; 3155 PetscFunctionReturn(0); 3156 } 3157 3158 #undef __FUNCT__ 3159 #define __FUNCT__ "DMSetNumFields" 3160 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields)
{
  /* DMSetNumFields - Destroy any existing field objects and allocate numFields
     fresh PetscContainer placeholders. Assumes numFields >= 0 (not validated). */
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Release the previous field objects before reallocating the array */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Each slot starts as an empty PetscContainer until a real field object is installed */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* DMGetField - Return field f of the DM as a borrowed reference (do not destroy) */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has coordinates and the coarse DM does not,
   build coarse coordinates by injecting the fine coordinate vector. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM             dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec            coords,ccoords;
  VecScatter     scat;

  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates references ccoords, so our local reference can be dropped below */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has coordinates and the subdomain DM does not,
   scatter both global and local coordinate vectors onto the subdomain. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM             dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec            coords,ccoords,clcoords;
  VecScatter     *scat_i,*scat_g;

  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr =
DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    /* scat_i fills the subdomain global vector, scat_g the subdomain local vector */
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    /* The scatters and our references to the vectors are no longer needed;
       scat_i/scat_g arrays were allocated by DMCreateDomainDecompositionScatters */
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently stored vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  ierr =
VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Register hooks so that coarsened/subdomain DMs automatically inherit coordinates */
  ierr = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently stored vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; DMGetCoordinates() rebuilds them on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).
  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global coordinates from the local vector when only the
     local one has been set (the inverse of DMGetCoordinatesLocal) */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)
  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local (ghosted) coordinates from the global vector when
     only the global one has been set (the inverse of DMGetCoordinates) */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
.
cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM through the implementation's virtual method;
     implementations that do not provide one cannot carry coordinates. */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocatePoints"
/*@
  DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells

  Not collective

  Input Parameters:
+ dm - The DM
- v - The Vec of points

  Output Parameter:
. cells - The local cell numbers for cells which contain the points

  Level: developer

.keywords: point location, mesh
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
  PetscValidPointer(cells,3);
  /* Dispatch to the implementation's virtual method; error if not supported */
  if (dm->ops->locatepoints) {
    ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
  PetscFunctionReturn(0);
}