1 #include <petscsnes.h> 2 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = PETSC_NULL; 35 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 36 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 39 #endif 40 41 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 42 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 43 44 45 v->workSize = 0; 46 v->workArray = PETSC_NULL; 47 v->ltogmap = PETSC_NULL; 48 v->ltogmapb = PETSC_NULL; 49 v->bs = 1; 50 v->coloringtype = IS_COLORING_GLOBAL; 51 v->lf = PETSC_NULL; 52 v->lj = PETSC_NULL; 53 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 54 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 55 v->defaultSection = PETSC_NULL; 56 v->defaultGlobalSection = PETSC_NULL; 57 { 58 PetscInt i; 59 for(i = 0; i < 10; ++i) { 60 v->nullspaceConstructors[i] = PETSC_NULL; 61 } 62 } 63 64 *dm = v; 65 PetscFunctionReturn(0); 66 } 67 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "DMSetVecType" 71 /*@C 72 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 73 74 Logically 
Collective on DMDA 75 76 Input Parameter: 77 + da - initial distributed array 78 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 79 80 Options Database: 81 . -dm_vec_type ctype 82 83 Level: intermediate 84 85 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 86 @*/ 87 PetscErrorCode DMSetVecType(DM da,const VecType ctype) 88 { 89 PetscErrorCode ierr; 90 91 PetscFunctionBegin; 92 PetscValidHeaderSpecific(da,DM_CLASSID,1); 93 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 94 ierr = PetscStrallocpy(ctype,&da->vectype);CHKERRQ(ierr); 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "DMSetMatType" 100 /*@C 101 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 102 103 Logically Collective on DM 104 105 Input Parameter: 106 + dm - the DM context 107 . ctype - the matrix type 108 109 Options Database: 110 . -dm_mat_type ctype 111 112 Level: intermediate 113 114 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 115 @*/ 116 PetscErrorCode DMSetMatType(DM dm,const MatType ctype) 117 { 118 PetscErrorCode ierr; 119 PetscFunctionBegin; 120 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 121 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 122 ierr = PetscStrallocpy(ctype,&dm->mattype);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 126 #undef __FUNCT__ 127 #define __FUNCT__ "DMSetOptionsPrefix" 128 /*@C 129 DMSetOptionsPrefix - Sets the prefix used for searching for all 130 DMDA options in the database. 131 132 Logically Collective on DMDA 133 134 Input Parameter: 135 + da - the DMDA context 136 - prefix - the prefix to prepend to all option names 137 138 Notes: 139 A hyphen (-) must NOT be given at the beginning of the prefix name. 140 The first character of all runtime options is AUTOMATICALLY the hyphen. 
   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegate to the generic PetscObject options-prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
   DMDestroy - Destroys a vector packer or DMDA.

   Collective on DM

   Input Parameter:
.  dm - the DM object to destroy

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i]) {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* The solution vector holds a composed back-reference to this DM */
    PetscObject obj;
    ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr);
    if (obj == (PetscObject)*dm) cnt++;
  }

  /* If anything but the circular references still holds the DM, just drop one reference */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    /* A non-NULL localout slot means the user never restored a borrowed local vector */
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }

  /* User context is destroyed only if the user supplied a destroy routine */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->workArray);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs before the header is freed */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
   DMSetUp - sets up the data structures inside a DM object

   Collective on DM

   Input Parameter:
.  dm - the DM object to setup

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Idempotent: a second call is a no-op */
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
   DMSetFromOptions - sets parameters in a DM from the options database

   Collective on DM

   Input Parameter:
.  dm - the DM object to set options for

   Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type> type of matrix to create inside DM 302 - -dm_coloring_type <global or ghosted> 303 304 Level: developer 305 306 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 307 308 @*/ 309 PetscErrorCode DMSetFromOptions(DM dm) 310 { 311 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 312 PetscErrorCode ierr; 313 char typeName[256] = MATAIJ; 314 315 PetscFunctionBegin; 316 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 317 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 318 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 319 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 320 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 321 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 322 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 323 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 324 if (flg) { 325 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 326 } 327 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof typeName,&flg);CHKERRQ(ierr); 328 if (flg) { 329 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 330 } 331 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 332 if 
(dm->ops->setfromoptions) { 333 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 334 } 335 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 336 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 337 ierr = PetscOptionsEnd();CHKERRQ(ierr); 338 if (flg1) { 339 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 340 } 341 if (flg2) { 342 PetscViewer viewer; 343 344 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 345 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 346 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 347 ierr = DMView(dm, viewer);CHKERRQ(ierr); 348 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 349 } 350 if (flg3) { 351 PetscViewer viewer; 352 353 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 354 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 355 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 356 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 357 ierr = DMView(dm, viewer);CHKERRQ(ierr); 358 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 359 } 360 if (flg4) { 361 PetscViewer viewer; 362 363 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 364 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 365 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 366 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 367 ierr = DMView(dm, viewer);CHKERRQ(ierr); 368 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 369 } 370 PetscFunctionReturn(0); 371 } 372 373 #undef __FUNCT__ 374 #define __FUNCT__ "DMView" 375 /*@C 376 DMView - Views a vector packer or DMDA. 
377 378 Collective on DM 379 380 Input Parameter: 381 + dm - the DM object to view 382 - v - the viewer 383 384 Level: developer 385 386 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 387 388 @*/ 389 PetscErrorCode DMView(DM dm,PetscViewer v) 390 { 391 PetscErrorCode ierr; 392 393 PetscFunctionBegin; 394 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 395 if (!v) { 396 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 397 } 398 if (dm->ops->view) { 399 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 400 } 401 PetscFunctionReturn(0); 402 } 403 404 #undef __FUNCT__ 405 #define __FUNCT__ "DMCreateGlobalVector" 406 /*@ 407 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 408 409 Collective on DM 410 411 Input Parameter: 412 . dm - the DM object 413 414 Output Parameter: 415 . vec - the global vector 416 417 Level: beginner 418 419 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 420 421 @*/ 422 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 423 { 424 PetscErrorCode ierr; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 428 if (dm->defaultSection) { 429 PetscSection gSection; 430 PetscInt localSize; 431 432 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 433 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 434 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 435 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 436 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 437 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 438 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 439 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 440 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 441 /* ierr = 
VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 442 } else { 443 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 444 } 445 PetscFunctionReturn(0); 446 } 447 448 #undef __FUNCT__ 449 #define __FUNCT__ "DMCreateLocalVector" 450 /*@ 451 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 452 453 Not Collective 454 455 Input Parameter: 456 . dm - the DM object 457 458 Output Parameter: 459 . vec - the local vector 460 461 Level: beginner 462 463 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 464 465 @*/ 466 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 467 { 468 PetscErrorCode ierr; 469 470 PetscFunctionBegin; 471 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 472 if (dm->defaultSection) { 473 PetscInt localSize; 474 475 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 476 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 477 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 478 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 479 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 480 } else { 481 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 482 } 483 PetscFunctionReturn(0); 484 } 485 486 #undef __FUNCT__ 487 #define __FUNCT__ "DMGetLocalToGlobalMapping" 488 /*@ 489 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 490 491 Collective on DM 492 493 Input Parameter: 494 . dm - the DM that provides the mapping 495 496 Output Parameter: 497 . ltog - the mapping 498 499 Level: intermediate 500 501 Notes: 502 This mapping can then be used by VecSetLocalToGlobalMapping() or 503 MatSetLocalToGlobalMapping(). 
504 505 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 506 @*/ 507 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 508 { 509 PetscErrorCode ierr; 510 511 PetscFunctionBegin; 512 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 513 PetscValidPointer(ltog,2); 514 if (!dm->ltogmap) { 515 PetscSection section, sectionGlobal; 516 517 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 518 if (section) { 519 PetscInt *ltog; 520 PetscInt pStart, pEnd, size, p, l; 521 522 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 523 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 524 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 525 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 526 for(p = pStart, l = 0; p < pEnd; ++p) { 527 PetscInt dof, off, c; 528 529 /* Should probably use constrained dofs */ 530 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 531 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 532 for(c = 0; c < dof; ++c, ++l) { 533 ltog[l] = off+c; 534 } 535 } 536 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 537 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 538 } else { 539 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 540 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 541 } 542 } 543 *ltog = dm->ltogmap; 544 PetscFunctionReturn(0); 545 } 546 547 #undef __FUNCT__ 548 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 549 /*@ 550 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 551 552 Collective on DM 553 554 Input Parameter: 555 . da - the distributed array that provides the mapping 556 557 Output Parameter: 558 . 
ltog - the block mapping 559 560 Level: intermediate 561 562 Notes: 563 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 564 MatSetLocalToGlobalMappingBlock(). 565 566 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 567 @*/ 568 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 569 { 570 PetscErrorCode ierr; 571 572 PetscFunctionBegin; 573 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 574 PetscValidPointer(ltog,2); 575 if (!dm->ltogmapb) { 576 PetscInt bs; 577 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 578 if (bs > 1) { 579 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 580 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 581 } else { 582 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 583 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 584 } 585 } 586 *ltog = dm->ltogmapb; 587 PetscFunctionReturn(0); 588 } 589 590 #undef __FUNCT__ 591 #define __FUNCT__ "DMGetBlockSize" 592 /*@ 593 DMGetBlockSize - Gets the inherent block size associated with a DM 594 595 Not Collective 596 597 Input Parameter: 598 . dm - the DM with block structure 599 600 Output Parameter: 601 . 
bs - the block size, 1 implies no exploitable block structure 602 603 Level: intermediate 604 605 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 606 @*/ 607 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 608 { 609 PetscFunctionBegin; 610 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 611 PetscValidPointer(bs,2); 612 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 613 *bs = dm->bs; 614 PetscFunctionReturn(0); 615 } 616 617 #undef __FUNCT__ 618 #define __FUNCT__ "DMCreateInterpolation" 619 /*@ 620 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 621 622 Collective on DM 623 624 Input Parameter: 625 + dm1 - the DM object 626 - dm2 - the second, finer DM object 627 628 Output Parameter: 629 + mat - the interpolation 630 - vec - the scaling (optional) 631 632 Level: developer 633 634 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 635 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 636 637 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMDAGetCoordinateDA()) to interpolate the mesh coordinate vectors 638 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
639 640 641 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 642 643 @*/ 644 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 645 { 646 PetscErrorCode ierr; 647 648 PetscFunctionBegin; 649 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 650 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 651 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 652 PetscFunctionReturn(0); 653 } 654 655 #undef __FUNCT__ 656 #define __FUNCT__ "DMCreateInjection" 657 /*@ 658 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 659 660 Collective on DM 661 662 Input Parameter: 663 + dm1 - the DM object 664 - dm2 - the second, finer DM object 665 666 Output Parameter: 667 . ctx - the injection 668 669 Level: developer 670 671 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 672 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 673 674 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 675 676 @*/ 677 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 678 { 679 PetscErrorCode ierr; 680 681 PetscFunctionBegin; 682 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 683 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 684 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 685 PetscFunctionReturn(0); 686 } 687 688 #undef __FUNCT__ 689 #define __FUNCT__ "DMCreateColoring" 690 /*@C 691 DMCreateColoring - Gets coloring for a DMDA or DMComposite 692 693 Collective on DM 694 695 Input Parameter: 696 + dm - the DM object 697 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 698 - matype - either MATAIJ or MATBAIJ 699 700 Output Parameter: 701 . 
coloring - the coloring 702 703 Level: developer 704 705 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 706 707 @*/ 708 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 709 { 710 PetscErrorCode ierr; 711 712 PetscFunctionBegin; 713 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 714 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 715 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 716 PetscFunctionReturn(0); 717 } 718 719 #undef __FUNCT__ 720 #define __FUNCT__ "DMCreateMatrix" 721 /*@C 722 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 723 724 Collective on DM 725 726 Input Parameter: 727 + dm - the DM object 728 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 729 any type which inherits from one of these (such as MATAIJ) 730 731 Output Parameter: 732 . mat - the empty Jacobian 733 734 Level: beginner 735 736 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 737 do not need to do it yourself. 738 739 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 740 the nonzero pattern call DMDASetMatPreallocateOnly() 741 742 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 743 internally by PETSc. 744 745 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 746 the indices for the global numbering for DMDAs which is complicated. 
747 748 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 749 750 @*/ 751 PetscErrorCode DMCreateMatrix(DM dm,const MatType mtype,Mat *mat) 752 { 753 PetscErrorCode ierr; 754 755 PetscFunctionBegin; 756 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 757 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 758 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 759 #endif 760 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 761 PetscValidPointer(mat,3); 762 if (dm->mattype) { 763 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 764 } else { 765 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 766 } 767 PetscFunctionReturn(0); 768 } 769 770 #undef __FUNCT__ 771 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 772 /*@ 773 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 774 preallocated but the nonzero structure and zero values will not be set. 775 776 Logically Collective on DMDA 777 778 Input Parameter: 779 + dm - the DM 780 - only - PETSC_TRUE if only want preallocation 781 782 Level: developer 783 .seealso DMCreateMatrix() 784 @*/ 785 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 786 { 787 PetscFunctionBegin; 788 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 789 dm->prealloc_only = only; 790 PetscFunctionReturn(0); 791 } 792 793 #undef __FUNCT__ 794 #define __FUNCT__ "DMGetWorkArray" 795 /*@C 796 DMGetWorkArray - Gets a work array guaranteed to be at least the input size 797 798 Not Collective 799 800 Input Parameters: 801 + dm - the DM object 802 - size - The minium size 803 804 Output Parameter: 805 . 
array - the work array 806 807 Level: developer 808 809 .seealso DMDestroy(), DMCreate() 810 @*/ 811 PetscErrorCode DMGetWorkArray(DM dm,PetscInt size,PetscScalar **array) 812 { 813 PetscErrorCode ierr; 814 815 PetscFunctionBegin; 816 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 817 PetscValidPointer(array,3); 818 if (size > dm->workSize) { 819 dm->workSize = size; 820 ierr = PetscFree(dm->workArray);CHKERRQ(ierr); 821 ierr = PetscMalloc(dm->workSize * sizeof(PetscScalar), &dm->workArray);CHKERRQ(ierr); 822 } 823 *array = dm->workArray; 824 PetscFunctionReturn(0); 825 } 826 827 828 #undef __FUNCT__ 829 #define __FUNCT__ "DMSetNullSpaceConstructor" 830 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 831 { 832 PetscFunctionBegin; 833 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 834 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 835 dm->nullspaceConstructors[field] = nullsp; 836 PetscFunctionReturn(0); 837 } 838 839 #undef __FUNCT__ 840 #define __FUNCT__ "DMCreateFieldIS" 841 /*@C 842 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 843 844 Not collective 845 846 Input Parameter: 847 . dm - the DM object 848 849 Output Parameters: 850 + numFields - The number of fields (or PETSC_NULL if not requested) 851 . fieldNames - The name for each field (or PETSC_NULL if not requested) 852 - fields - The global indices for each field (or PETSC_NULL if not requested) 853 854 Level: intermediate 855 856 Notes: 857 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 858 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 859 PetscFree(). 
860 861 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 862 @*/ 863 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 864 { 865 PetscSection section, sectionGlobal; 866 PetscErrorCode ierr; 867 868 PetscFunctionBegin; 869 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 870 if (numFields) { 871 PetscValidPointer(numFields,2); 872 *numFields = 0; 873 } 874 if (fieldNames) { 875 PetscValidPointer(fieldNames,3); 876 *fieldNames = PETSC_NULL; 877 } 878 if (fields) { 879 PetscValidPointer(fields,4); 880 *fields = PETSC_NULL; 881 } 882 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 883 if (section) { 884 PetscInt *fieldSizes, **fieldIndices; 885 PetscInt nF, f, pStart, pEnd, p; 886 887 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 888 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 889 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 890 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 891 for(f = 0; f < nF; ++f) { 892 fieldSizes[f] = 0; 893 } 894 for(p = pStart; p < pEnd; ++p) { 895 PetscInt gdof; 896 897 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 898 if (gdof > 0) { 899 for(f = 0; f < nF; ++f) { 900 PetscInt fdof, fcdof; 901 902 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 903 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 904 fieldSizes[f] += fdof-fcdof; 905 } 906 } 907 } 908 for(f = 0; f < nF; ++f) { 909 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 910 fieldSizes[f] = 0; 911 } 912 for(p = pStart; p < pEnd; ++p) { 913 PetscInt gdof, goff; 914 915 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 916 if (gdof > 0) { 917 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 918 for(f = 0; f < nF; ++f) { 919 PetscInt fdof, fcdof, fc; 920 921 
ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 922 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 923 for(fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 924 fieldIndices[f][fieldSizes[f]] = goff++; 925 } 926 } 927 } 928 } 929 if (numFields) {*numFields = nF;} 930 if (fieldNames) { 931 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 932 for(f = 0; f < nF; ++f) { 933 const char *fieldName; 934 935 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 936 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 937 } 938 } 939 if (fields) { 940 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 941 for(f = 0; f < nF; ++f) { 942 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 943 } 944 } 945 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 946 } else { 947 if(dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 948 } 949 PetscFunctionReturn(0); 950 } 951 952 #undef __FUNCT__ 953 #define __FUNCT__ "DMCreateFieldDecompositionDM" 954 /*@C 955 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 956 957 Not Collective 958 959 Input Parameters: 960 + dm - the DM object 961 - name - the name of the field decomposition 962 963 Output Parameter: 964 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 965 966 Level: advanced 967 968 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 969 @*/ 970 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 971 { 972 PetscErrorCode ierr; 973 974 PetscFunctionBegin; 975 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 976 PetscValidCharPointer(name,2); 977 PetscValidPointer(ddm,3); 978 *ddm = PETSC_NULL; 979 if(dm->ops->createfielddecompositiondm) { 980 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 981 } 982 PetscFunctionReturn(0); 983 } 984 985 #undef __FUNCT__ 986 #define __FUNCT__ "DMCreateFieldDecomposition" 987 /*@C 988 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 989 corresponding to different fields: each IS contains the global indices of the dofs of the 990 corresponding field. The optional list of DMs define the DM for each subproblem. 991 Generalizes DMCreateFieldIS(). 992 993 Not collective 994 995 Input Parameter: 996 . dm - the DM object 997 998 Output Parameters: 999 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1000 . namelist - The name for each field (or PETSC_NULL if not requested) 1001 . islist - The global indices for each field (or PETSC_NULL if not requested) 1002 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1003 1004 Level: intermediate 1005 1006 Notes: 1007 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1008 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1009 and all of the arrays should be freed with PetscFree(). 
1010 1011 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1012 @*/ 1013 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1014 { 1015 PetscErrorCode ierr; 1016 1017 PetscFunctionBegin; 1018 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1019 if (len) {PetscValidPointer(len,2); *len = 0;} 1020 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1021 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1022 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1023 if (!dm->ops->createfielddecomposition) { 1024 PetscSection section; 1025 PetscInt numFields, f; 1026 1027 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1028 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1029 if (section && numFields && dm->ops->createsubdm) { 1030 *len = numFields; 1031 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1032 for(f = 0; f < numFields; ++f) { 1033 const char *fieldName; 1034 1035 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1036 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1037 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1038 } 1039 } else { 1040 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1041 /* By default there are no DMs associated with subproblems. */ 1042 if(dmlist) *dmlist = PETSC_NULL; 1043 } 1044 } 1045 else { 1046 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1047 } 1048 PetscFunctionReturn(0); 1049 } 1050 1051 #undef __FUNCT__ 1052 #define __FUNCT__ "DMCreateSubDM" 1053 /*@C 1054 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1055 The fields are defined by DMCreateFieldIS(). 
1056 1057 Not collective 1058 1059 Input Parameters: 1060 + dm - the DM object 1061 . numFields - number of fields in this subproblem 1062 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1063 1064 Output Parameters: 1065 . is - The global indices for the subproblem 1066 - dm - The DM for the subproblem 1067 1068 Level: intermediate 1069 1070 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1071 @*/ 1072 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1073 { 1074 PetscErrorCode ierr; 1075 1076 PetscFunctionBegin; 1077 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1078 PetscValidPointer(fields,3); 1079 if (is) {PetscValidPointer(is,4);} 1080 if (subdm) {PetscValidPointer(subdm,5);} 1081 if (dm->ops->createsubdm) { 1082 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1083 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1084 PetscFunctionReturn(0); 1085 } 1086 1087 #undef __FUNCT__ 1088 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1089 /*@C 1090 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1091 1092 Not Collective 1093 1094 Input Parameters: 1095 + dm - the DM object 1096 - name - the name of the subdomain decomposition 1097 1098 Output Parameter: 1099 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1100 1101 Level: advanced 1102 1103 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1104 @*/ 1105 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1106 { 1107 PetscErrorCode ierr; 1108 1109 PetscFunctionBegin; 1110 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1111 PetscValidCharPointer(name,2); 1112 PetscValidPointer(ddm,3); 1113 *ddm = PETSC_NULL; 1114 if(dm->ops->createdomaindecompositiondm) { 1115 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1116 } 1117 PetscFunctionReturn(0); 1118 } 1119 1120 #undef __FUNCT__ 1121 #define __FUNCT__ "DMCreateDomainDecomposition" 1122 /*@C 1123 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1124 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1125 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1126 define a nonoverlapping covering, while outer subdomains can overlap. 1127 The optional list of DMs define the DM for each subproblem. 1128 1129 Not collective 1130 1131 Input Parameter: 1132 . dm - the DM object 1133 1134 Output Parameters: 1135 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1136 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1137 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1138 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1139 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1140 1141 Level: intermediate 1142 1143 Notes: 1144 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1145 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1146 and all of the arrays should be freed with PetscFree(). 1147 1148 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1149 @*/ 1150 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1151 { 1152 PetscErrorCode ierr; 1153 1154 PetscFunctionBegin; 1155 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1156 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1157 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1158 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1159 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1160 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1161 if(dm->ops->createdomaindecomposition) { 1162 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1163 } 1164 PetscFunctionReturn(0); 1165 } 1166 1167 #undef __FUNCT__ 1168 #define __FUNCT__ "DMRefine" 1169 /*@ 1170 DMRefine - Refines a DM object 1171 1172 Collective on DM 1173 1174 Input Parameter: 1175 + dm - the DM object 1176 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1177 1178 Output Parameter: 1179 . 
dmf - the refined DM, or PETSC_NULL 1180 1181 Note: If no refinement was done, the return value is PETSC_NULL 1182 1183 Level: developer 1184 1185 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1186 @*/ 1187 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1188 { 1189 PetscErrorCode ierr; 1190 DMRefineHookLink link; 1191 1192 PetscFunctionBegin; 1193 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1194 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1195 if (*dmf) { 1196 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1197 (*dmf)->ops->initialguess = dm->ops->initialguess; 1198 (*dmf)->ops->function = dm->ops->function; 1199 (*dmf)->ops->functionj = dm->ops->functionj; 1200 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1201 (*dmf)->ops->jacobian = dm->ops->jacobian; 1202 } 1203 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1204 (*dmf)->ctx = dm->ctx; 1205 (*dmf)->leveldown = dm->leveldown; 1206 (*dmf)->levelup = dm->levelup + 1; 1207 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1208 for (link=dm->refinehook; link; link=link->next) { 1209 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1210 } 1211 } 1212 PetscFunctionReturn(0); 1213 } 1214 1215 #undef __FUNCT__ 1216 #define __FUNCT__ "DMRefineHookAdd" 1217 /*@ 1218 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1219 1220 Logically Collective 1221 1222 Input Arguments: 1223 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1224 . refinehook - function to run when setting up a coarser level 1225 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1226 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1227 1228 Calling sequence of refinehook: 1229 $ refinehook(DM coarse,DM fine,void *ctx); 1230 1231 + coarse - coarse level DM 1232 . fine - fine level DM to interpolate problem to 1233 - ctx - optional user-defined function context 1234 1235 Calling sequence for interphook: 1236 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1237 1238 + coarse - coarse level DM 1239 . interp - matrix interpolating a coarse-level solution to the finer grid 1240 . fine - fine level DM to update 1241 - ctx - optional user-defined function context 1242 1243 Level: advanced 1244 1245 Notes: 1246 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1247 1248 If this function is called multiple times, the hooks will be run in the order they are added. 1249 1250 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1251 @*/ 1252 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1253 { 1254 PetscErrorCode ierr; 1255 DMRefineHookLink link,*p; 1256 1257 PetscFunctionBegin; 1258 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1259 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1260 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1261 link->refinehook = refinehook; 1262 link->interphook = interphook; 1263 link->ctx = ctx; 1264 link->next = PETSC_NULL; 1265 *p = link; 1266 PetscFunctionReturn(0); 1267 } 1268 1269 #undef __FUNCT__ 1270 #define __FUNCT__ "DMInterpolate" 1271 /*@ 1272 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1273 1274 Collective if any 
hooks are 1275 1276 Input Arguments: 1277 + coarse - coarser DM to use as a base 1278 . restrct - interpolation matrix, apply using MatInterpolate() 1279 - fine - finer DM to update 1280 1281 Level: developer 1282 1283 .seealso: DMRefineHookAdd(), MatInterpolate() 1284 @*/ 1285 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1286 { 1287 PetscErrorCode ierr; 1288 DMRefineHookLink link; 1289 1290 PetscFunctionBegin; 1291 for (link=fine->refinehook; link; link=link->next) { 1292 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1293 } 1294 PetscFunctionReturn(0); 1295 } 1296 1297 #undef __FUNCT__ 1298 #define __FUNCT__ "DMGetRefineLevel" 1299 /*@ 1300 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1301 1302 Not Collective 1303 1304 Input Parameter: 1305 . dm - the DM object 1306 1307 Output Parameter: 1308 . level - number of refinements 1309 1310 Level: developer 1311 1312 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1313 1314 @*/ 1315 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1316 { 1317 PetscFunctionBegin; 1318 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1319 *level = dm->levelup; 1320 PetscFunctionReturn(0); 1321 } 1322 1323 #undef __FUNCT__ 1324 #define __FUNCT__ "DMGlobalToLocalBegin" 1325 /*@ 1326 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1327 1328 Neighbor-wise Collective on DM 1329 1330 Input Parameters: 1331 + dm - the DM object 1332 . g - the global vector 1333 . 
mode - INSERT_VALUES or ADD_VALUES 1334 - l - the local vector 1335 1336 1337 Level: beginner 1338 1339 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1340 1341 @*/ 1342 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1343 { 1344 PetscSF sf; 1345 PetscErrorCode ierr; 1346 1347 PetscFunctionBegin; 1348 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1349 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1350 if (sf) { 1351 PetscScalar *lArray, *gArray; 1352 1353 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1354 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1355 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1356 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1357 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1358 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1359 } else { 1360 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1361 } 1362 PetscFunctionReturn(0); 1363 } 1364 1365 #undef __FUNCT__ 1366 #define __FUNCT__ "DMGlobalToLocalEnd" 1367 /*@ 1368 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1369 1370 Neighbor-wise Collective on DM 1371 1372 Input Parameters: 1373 + dm - the DM object 1374 . g - the global vector 1375 . 
mode - INSERT_VALUES or ADD_VALUES 1376 - l - the local vector 1377 1378 1379 Level: beginner 1380 1381 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1382 1383 @*/ 1384 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1385 { 1386 PetscSF sf; 1387 PetscErrorCode ierr; 1388 PetscScalar *lArray, *gArray; 1389 1390 PetscFunctionBegin; 1391 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1392 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1393 if (sf) { 1394 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1395 1396 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1397 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1398 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1399 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1400 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1401 } else { 1402 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1403 } 1404 PetscFunctionReturn(0); 1405 } 1406 1407 #undef __FUNCT__ 1408 #define __FUNCT__ "DMLocalToGlobalBegin" 1409 /*@ 1410 DMLocalToGlobalBegin - updates global vectors from local vectors 1411 1412 Neighbor-wise Collective on DM 1413 1414 Input Parameters: 1415 + dm - the DM object 1416 . l - the local vector 1417 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1418 base point. 1419 - - the global vector 1420 1421 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1422 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1423 global array to the final global array with VecAXPY(). 1424 1425 Level: beginner 1426 1427 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1428 1429 @*/ 1430 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1431 { 1432 PetscSF sf; 1433 PetscErrorCode ierr; 1434 1435 PetscFunctionBegin; 1436 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1437 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1438 if (sf) { 1439 MPI_Op op; 1440 PetscScalar *lArray, *gArray; 1441 1442 switch(mode) { 1443 case INSERT_VALUES: 1444 case INSERT_ALL_VALUES: 1445 #if defined(PETSC_HAVE_MPI_REPLACE) 1446 op = MPI_REPLACE; break; 1447 #else 1448 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1449 #endif 1450 case ADD_VALUES: 1451 case ADD_ALL_VALUES: 1452 op = MPI_SUM; break; 1453 default: 1454 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1455 } 1456 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1457 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1458 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1459 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1460 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1461 } else { 1462 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1463 } 1464 PetscFunctionReturn(0); 1465 } 1466 1467 #undef __FUNCT__ 1468 #define __FUNCT__ "DMLocalToGlobalEnd" 1469 /*@ 1470 DMLocalToGlobalEnd - updates global vectors from local vectors 1471 1472 Neighbor-wise Collective on DM 1473 1474 Input Parameters: 1475 + dm - the DM object 1476 . l - the local vector 1477 . mode - INSERT_VALUES or ADD_VALUES 1478 - g - the global vector 1479 1480 1481 Level: beginner 1482 1483 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1484 1485 @*/ 1486 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1487 { 1488 PetscSF sf; 1489 PetscErrorCode ierr; 1490 1491 PetscFunctionBegin; 1492 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1493 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1494 if (sf) { 1495 MPI_Op op; 1496 PetscScalar *lArray, *gArray; 1497 1498 switch(mode) { 1499 case INSERT_VALUES: 1500 case INSERT_ALL_VALUES: 1501 #if defined(PETSC_HAVE_MPI_REPLACE) 1502 op = MPI_REPLACE; break; 1503 #else 1504 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1505 #endif 1506 case ADD_VALUES: 1507 case ADD_ALL_VALUES: 1508 op = MPI_SUM; break; 1509 default: 1510 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1511 } 1512 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1513 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1514 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1515 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1516 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1517 } else { 1518 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1519 } 1520 PetscFunctionReturn(0); 1521 } 1522 1523 #undef __FUNCT__ 1524 #define __FUNCT__ "DMComputeJacobianDefault" 1525 /*@ 1526 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1527 1528 Collective on DM 1529 1530 Input Parameter: 1531 + dm - the DM object 1532 . x - location to compute Jacobian at; may be ignored for linear problems 1533 . A - matrix that defines the operator for the linear solve 1534 - B - the matrix used to construct the preconditioner 1535 1536 Level: developer 1537 1538 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1539 DMSetFunction() 1540 1541 @*/ 1542 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1543 { 1544 PetscErrorCode ierr; 1545 1546 PetscFunctionBegin; 1547 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1548 *stflag = SAME_NONZERO_PATTERN; 1549 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1550 if (A != B) { 1551 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1552 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1553 } 1554 PetscFunctionReturn(0); 1555 } 1556 1557 #undef __FUNCT__ 1558 #define __FUNCT__ "DMCoarsen" 1559 /*@ 1560 DMCoarsen - Coarsens a DM object 1561 1562 Collective on DM 1563 1564 Input Parameter: 1565 + dm - the DM object 1566 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1567 1568 Output Parameter: 1569 . 
dmc - the coarsened DM 1570 1571 Level: developer 1572 1573 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1574 1575 @*/ 1576 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1577 { 1578 PetscErrorCode ierr; 1579 DMCoarsenHookLink link; 1580 1581 PetscFunctionBegin; 1582 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1583 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1584 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1585 (*dmc)->ops->initialguess = dm->ops->initialguess; 1586 (*dmc)->ops->function = dm->ops->function; 1587 (*dmc)->ops->functionj = dm->ops->functionj; 1588 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1589 (*dmc)->ops->jacobian = dm->ops->jacobian; 1590 } 1591 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1592 (*dmc)->ctx = dm->ctx; 1593 (*dmc)->levelup = dm->levelup; 1594 (*dmc)->leveldown = dm->leveldown + 1; 1595 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1596 for (link=dm->coarsenhook; link; link=link->next) { 1597 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1598 } 1599 PetscFunctionReturn(0); 1600 } 1601 1602 #undef __FUNCT__ 1603 #define __FUNCT__ "DMCoarsenHookAdd" 1604 /*@ 1605 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1606 1607 Logically Collective 1608 1609 Input Arguments: 1610 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1611 . coarsenhook - function to run when setting up a coarser level 1612 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1613 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1614 1615 Calling sequence of coarsenhook: 1616 $ coarsenhook(DM fine,DM coarse,void *ctx); 1617 1618 + fine - fine level DM 1619 . 
coarse - coarse level DM to restrict problem to 1620 - ctx - optional user-defined function context 1621 1622 Calling sequence for restricthook: 1623 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1624 1625 + fine - fine level DM 1626 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1627 . rscale - scaling vector for restriction 1628 . inject - matrix restricting by injection 1629 . coarse - coarse level DM to update 1630 - ctx - optional user-defined function context 1631 1632 Level: advanced 1633 1634 Notes: 1635 This function is only needed if auxiliary data needs to be set up on coarse grids. 1636 1637 If this function is called multiple times, the hooks will be run in the order they are added. 1638 1639 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1640 extract the finest level information from its context (instead of from the SNES). 1641 1642 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1643 @*/ 1644 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1645 { 1646 PetscErrorCode ierr; 1647 DMCoarsenHookLink link,*p; 1648 1649 PetscFunctionBegin; 1650 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1651 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1652 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1653 link->coarsenhook = coarsenhook; 1654 link->restricthook = restricthook; 1655 link->ctx = ctx; 1656 link->next = PETSC_NULL; 1657 *p = link; 1658 PetscFunctionReturn(0); 1659 } 1660 1661 #undef __FUNCT__ 1662 #define __FUNCT__ "DMRestrict" 1663 /*@ 1664 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1665 1666 Collective 
if any hooks are 1667 1668 Input Arguments: 1669 + fine - finer DM to use as a base 1670 . restrct - restriction matrix, apply using MatRestrict() 1671 . inject - injection matrix, also use MatRestrict() 1672 - coarse - coarer DM to update 1673 1674 Level: developer 1675 1676 .seealso: DMCoarsenHookAdd(), MatRestrict() 1677 @*/ 1678 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1679 { 1680 PetscErrorCode ierr; 1681 DMCoarsenHookLink link; 1682 1683 PetscFunctionBegin; 1684 for (link=fine->coarsenhook; link; link=link->next) { 1685 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1686 } 1687 PetscFunctionReturn(0); 1688 } 1689 1690 #undef __FUNCT__ 1691 #define __FUNCT__ "DMGetCoarsenLevel" 1692 /*@ 1693 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1694 1695 Not Collective 1696 1697 Input Parameter: 1698 . dm - the DM object 1699 1700 Output Parameter: 1701 . level - number of coarsenings 1702 1703 Level: developer 1704 1705 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1706 1707 @*/ 1708 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1709 { 1710 PetscFunctionBegin; 1711 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1712 *level = dm->leveldown; 1713 PetscFunctionReturn(0); 1714 } 1715 1716 1717 1718 #undef __FUNCT__ 1719 #define __FUNCT__ "DMRefineHierarchy" 1720 /*@C 1721 DMRefineHierarchy - Refines a DM object, all levels at once 1722 1723 Collective on DM 1724 1725 Input Parameter: 1726 + dm - the DM object 1727 - nlevels - the number of levels of refinement 1728 1729 Output Parameter: 1730 . 
dmf - the refined DM hierarchy 1731 1732 Level: developer 1733 1734 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1735 1736 @*/ 1737 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1738 { 1739 PetscErrorCode ierr; 1740 1741 PetscFunctionBegin; 1742 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1743 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1744 if (nlevels == 0) PetscFunctionReturn(0); 1745 if (dm->ops->refinehierarchy) { 1746 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1747 } else if (dm->ops->refine) { 1748 PetscInt i; 1749 1750 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1751 for (i=1; i<nlevels; i++) { 1752 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1753 } 1754 } else { 1755 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1756 } 1757 PetscFunctionReturn(0); 1758 } 1759 1760 #undef __FUNCT__ 1761 #define __FUNCT__ "DMCoarsenHierarchy" 1762 /*@C 1763 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1764 1765 Collective on DM 1766 1767 Input Parameter: 1768 + dm - the DM object 1769 - nlevels - the number of levels of coarsening 1770 1771 Output Parameter: 1772 . 
dmc - the coarsened DM hierarchy 1773 1774 Level: developer 1775 1776 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1777 1778 @*/ 1779 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 1780 { 1781 PetscErrorCode ierr; 1782 1783 PetscFunctionBegin; 1784 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1785 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1786 if (nlevels == 0) PetscFunctionReturn(0); 1787 PetscValidPointer(dmc,3); 1788 if (dm->ops->coarsenhierarchy) { 1789 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 1790 } else if (dm->ops->coarsen) { 1791 PetscInt i; 1792 1793 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 1794 for (i=1; i<nlevels; i++) { 1795 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 1796 } 1797 } else { 1798 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 1799 } 1800 PetscFunctionReturn(0); 1801 } 1802 1803 #undef __FUNCT__ 1804 #define __FUNCT__ "DMCreateAggregates" 1805 /*@ 1806 DMCreateAggregates - Gets the aggregates that map between 1807 grids associated with two DMs. 1808 1809 Collective on DM 1810 1811 Input Parameters: 1812 + dmc - the coarse grid DM 1813 - dmf - the fine grid DM 1814 1815 Output Parameters: 1816 . 
rest - the restriction matrix (transpose of the projection matrix) 1817 1818 Level: intermediate 1819 1820 .keywords: interpolation, restriction, multigrid 1821 1822 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 1823 @*/ 1824 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 1825 { 1826 PetscErrorCode ierr; 1827 1828 PetscFunctionBegin; 1829 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 1830 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 1831 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 1832 PetscFunctionReturn(0); 1833 } 1834 1835 #undef __FUNCT__ 1836 #define __FUNCT__ "DMSetApplicationContextDestroy" 1837 /*@C 1838 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 1839 1840 Not Collective 1841 1842 Input Parameters: 1843 + dm - the DM object 1844 - destroy - the destroy function 1845 1846 Level: intermediate 1847 1848 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1849 1850 @*/ 1851 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 1852 { 1853 PetscFunctionBegin; 1854 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1855 dm->ctxdestroy = destroy; 1856 PetscFunctionReturn(0); 1857 } 1858 1859 #undef __FUNCT__ 1860 #define __FUNCT__ "DMSetApplicationContext" 1861 /*@ 1862 DMSetApplicationContext - Set a user context into a DM object 1863 1864 Not Collective 1865 1866 Input Parameters: 1867 + dm - the DM object 1868 - ctx - the user context 1869 1870 Level: intermediate 1871 1872 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1873 1874 @*/ 1875 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 1876 { 1877 PetscFunctionBegin; 1878 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1879 dm->ctx = ctx; 1880 PetscFunctionReturn(0); 1881 } 1882 
1883 #undef __FUNCT__ 1884 #define __FUNCT__ "DMGetApplicationContext" 1885 /*@ 1886 DMGetApplicationContext - Gets a user context from a DM object 1887 1888 Not Collective 1889 1890 Input Parameter: 1891 . dm - the DM object 1892 1893 Output Parameter: 1894 . ctx - the user context 1895 1896 Level: intermediate 1897 1898 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1899 1900 @*/ 1901 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 1902 { 1903 PetscFunctionBegin; 1904 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1905 *(void**)ctx = dm->ctx; 1906 PetscFunctionReturn(0); 1907 } 1908 1909 #undef __FUNCT__ 1910 #define __FUNCT__ "DMSetInitialGuess" 1911 /*@C 1912 DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers 1913 1914 Logically Collective on DM 1915 1916 Input Parameter: 1917 + dm - the DM object to destroy 1918 - f - the function to compute the initial guess 1919 1920 Level: intermediate 1921 1922 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 1923 1924 @*/ 1925 PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec)) 1926 { 1927 PetscFunctionBegin; 1928 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1929 dm->ops->initialguess = f; 1930 PetscFunctionReturn(0); 1931 } 1932 1933 #undef __FUNCT__ 1934 #define __FUNCT__ "DMSetFunction" 1935 /*@C 1936 DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES 1937 1938 Logically Collective on DM 1939 1940 Input Parameter: 1941 + dm - the DM object 1942 - f - the function to compute (use PETSC_NULL to cancel a previous function that was set) 1943 1944 Level: intermediate 1945 1946 Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite 
differences if no Jacobian 1947 computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian. 1948 1949 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1950 DMSetJacobian() 1951 1952 @*/ 1953 PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 1954 { 1955 PetscFunctionBegin; 1956 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1957 dm->ops->function = f; 1958 if (f) { 1959 dm->ops->functionj = f; 1960 } 1961 PetscFunctionReturn(0); 1962 } 1963 1964 #undef __FUNCT__ 1965 #define __FUNCT__ "DMSetJacobian" 1966 /*@C 1967 DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES 1968 1969 Logically Collective on DM 1970 1971 Input Parameter: 1972 + dm - the DM object to destroy 1973 - f - the function to compute the matrix entries 1974 1975 Level: intermediate 1976 1977 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1978 DMSetFunction() 1979 1980 @*/ 1981 PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*)) 1982 { 1983 PetscFunctionBegin; 1984 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1985 dm->ops->jacobian = f; 1986 PetscFunctionReturn(0); 1987 } 1988 1989 #undef __FUNCT__ 1990 #define __FUNCT__ "DMSetVariableBounds" 1991 /*@C 1992 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 
1993 1994 Logically Collective on DM 1995 1996 Input Parameter: 1997 + dm - the DM object 1998 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 1999 2000 Level: intermediate 2001 2002 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2003 DMSetJacobian() 2004 2005 @*/ 2006 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2007 { 2008 PetscFunctionBegin; 2009 dm->ops->computevariablebounds = f; 2010 PetscFunctionReturn(0); 2011 } 2012 2013 #undef __FUNCT__ 2014 #define __FUNCT__ "DMHasVariableBounds" 2015 /*@ 2016 DMHasVariableBounds - does the DM object have a variable bounds function? 2017 2018 Not Collective 2019 2020 Input Parameter: 2021 . dm - the DM object to destroy 2022 2023 Output Parameter: 2024 . flg - PETSC_TRUE if the variable bounds function exists 2025 2026 Level: developer 2027 2028 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2029 2030 @*/ 2031 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2032 { 2033 PetscFunctionBegin; 2034 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2035 PetscFunctionReturn(0); 2036 } 2037 2038 #undef __FUNCT__ 2039 #define __FUNCT__ "DMComputeVariableBounds" 2040 /*@C 2041 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2042 2043 Logically Collective on DM 2044 2045 Input Parameters: 2046 + dm - the DM object to destroy 2047 - x - current solution at which the bounds are computed 2048 2049 Output parameters: 2050 + xl - lower bound 2051 - xu - upper bound 2052 2053 Level: intermediate 2054 2055 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2056 DMSetFunction(), DMSetVariableBounds() 2057 2058 @*/ 2059 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2060 { 2061 PetscErrorCode ierr; 2062 PetscFunctionBegin; 2063 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2064 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2065 if(dm->ops->computevariablebounds) { 2066 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2067 } 2068 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2069 PetscFunctionReturn(0); 2070 } 2071 2072 #undef __FUNCT__ 2073 #define __FUNCT__ "DMComputeInitialGuess" 2074 /*@ 2075 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2076 2077 Collective on DM 2078 2079 Input Parameter: 2080 + dm - the DM object to destroy 2081 - x - the vector to hold the initial guess values 2082 2083 Level: developer 2084 2085 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2086 2087 @*/ 2088 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2089 { 2090 PetscErrorCode ierr; 2091 PetscFunctionBegin; 2092 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2093 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2094 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2095 PetscFunctionReturn(0); 2096 } 2097 2098 #undef __FUNCT__ 2099 #define __FUNCT__ "DMHasInitialGuess" 2100 /*@ 2101 
DMHasInitialGuess - does the DM object have an initial guess function 2102 2103 Not Collective 2104 2105 Input Parameter: 2106 . dm - the DM object to destroy 2107 2108 Output Parameter: 2109 . flg - PETSC_TRUE if function exists 2110 2111 Level: developer 2112 2113 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2114 2115 @*/ 2116 PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg) 2117 { 2118 PetscFunctionBegin; 2119 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2120 *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE; 2121 PetscFunctionReturn(0); 2122 } 2123 2124 #undef __FUNCT__ 2125 #define __FUNCT__ "DMHasFunction" 2126 /*@ 2127 DMHasFunction - does the DM object have a function 2128 2129 Not Collective 2130 2131 Input Parameter: 2132 . dm - the DM object to destroy 2133 2134 Output Parameter: 2135 . flg - PETSC_TRUE if function exists 2136 2137 Level: developer 2138 2139 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2140 2141 @*/ 2142 PetscErrorCode DMHasFunction(DM dm,PetscBool *flg) 2143 { 2144 PetscFunctionBegin; 2145 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2146 *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE; 2147 PetscFunctionReturn(0); 2148 } 2149 2150 #undef __FUNCT__ 2151 #define __FUNCT__ "DMHasJacobian" 2152 /*@ 2153 DMHasJacobian - does the DM object have a matrix function 2154 2155 Not Collective 2156 2157 Input Parameter: 2158 . dm - the DM object to destroy 2159 2160 Output Parameter: 2161 . 
flg - PETSC_TRUE if function exists 2162 2163 Level: developer 2164 2165 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2166 2167 @*/ 2168 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2169 { 2170 PetscFunctionBegin; 2171 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2172 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2173 PetscFunctionReturn(0); 2174 } 2175 2176 #undef __FUNCT__ 2177 #define __FUNCT__ "DMHasColoring" 2178 /*@ 2179 DMHasColoring - does the DM object have a method of providing a coloring? 2180 2181 Not Collective 2182 2183 Input Parameter: 2184 . dm - the DM object 2185 2186 Output Parameter: 2187 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2188 2189 Level: developer 2190 2191 .seealso DMHasFunction(), DMCreateColoring() 2192 2193 @*/ 2194 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2195 { 2196 PetscFunctionBegin; 2197 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2198 PetscFunctionReturn(0); 2199 } 2200 2201 #undef __FUNCT__ 2202 #define __FUNCT__ "DMSetVec" 2203 /*@C 2204 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2205 2206 Collective on DM 2207 2208 Input Parameter: 2209 + dm - the DM object 2210 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2211 2212 Level: developer 2213 2214 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2215 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2216 2217 @*/ 2218 PetscErrorCode DMSetVec(DM dm,Vec x) 2219 { 2220 PetscErrorCode ierr; 2221 PetscFunctionBegin; 2222 if (x) { 2223 if (!dm->x) { 2224 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2225 } 2226 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2227 } 2228 else if(dm->x) { 2229 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2230 } 2231 PetscFunctionReturn(0); 2232 } 2233 2234 2235 #undef __FUNCT__ 2236 #define __FUNCT__ "DMComputeFunction" 2237 /*@ 2238 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2239 2240 Collective on DM 2241 2242 Input Parameter: 2243 + dm - the DM object to destroy 2244 . x - the location where the function is evaluationed, may be ignored for linear problems 2245 - b - the vector to hold the right hand side entries 2246 2247 Level: developer 2248 2249 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2250 DMSetJacobian() 2251 2252 @*/ 2253 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2254 { 2255 PetscErrorCode ierr; 2256 PetscFunctionBegin; 2257 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2258 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2259 PetscStackPush("DM user function"); 2260 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2261 PetscStackPop; 2262 PetscFunctionReturn(0); 2263 } 2264 2265 2266 2267 #undef __FUNCT__ 2268 #define __FUNCT__ "DMComputeJacobian" 2269 /*@ 2270 DMComputeJacobian - compute the matrix entries for the solver 2271 2272 Collective on DM 2273 2274 Input Parameter: 2275 + dm - the DM object 2276 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM 2277 . A - matrix that defines the operator for the linear solve 2278 - B - the matrix used to construct the preconditioner 2279 2280 Level: developer 2281 2282 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2283 DMSetFunction() 2284 2285 @*/ 2286 PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 2287 { 2288 PetscErrorCode ierr; 2289 2290 PetscFunctionBegin; 2291 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2292 if (!dm->ops->jacobian) { 2293 ISColoring coloring; 2294 MatFDColoring fd; 2295 const MatType mtype; 2296 2297 ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr); 2298 ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr); 2299 ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr); 2300 ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr); 2301 ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr); 2302 ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr); 2303 ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr); 2304 2305 dm->fd = fd; 2306 dm->ops->jacobian = DMComputeJacobianDefault; 2307 2308 /* don't know why this is needed */ 2309 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 2310 } 2311 if (!x) x = dm->x; 2312 ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr); 2313 2314 /* if matrix depends on x; i.e. 
nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */ 2315 if (x) { 2316 if (!dm->x) { 2317 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2318 } 2319 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2320 } 2321 if (A != B) { 2322 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2323 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2324 } 2325 PetscFunctionReturn(0); 2326 } 2327 2328 2329 PetscFList DMList = PETSC_NULL; 2330 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2331 2332 #undef __FUNCT__ 2333 #define __FUNCT__ "DMSetType" 2334 /*@C 2335 DMSetType - Builds a DM, for a particular DM implementation. 2336 2337 Collective on DM 2338 2339 Input Parameters: 2340 + dm - The DM object 2341 - method - The name of the DM type 2342 2343 Options Database Key: 2344 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2345 2346 Notes: 2347 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2348 2349 Level: intermediate 2350 2351 .keywords: DM, set, type 2352 .seealso: DMGetType(), DMCreate() 2353 @*/ 2354 PetscErrorCode DMSetType(DM dm, const DMType method) 2355 { 2356 PetscErrorCode (*r)(DM); 2357 PetscBool match; 2358 PetscErrorCode ierr; 2359 2360 PetscFunctionBegin; 2361 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2362 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2363 if (match) PetscFunctionReturn(0); 2364 2365 if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);} 2366 ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr); 2367 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2368 2369 if (dm->ops->destroy) { 2370 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2371 dm->ops->destroy = PETSC_NULL; 2372 } 2373 ierr = (*r)(dm);CHKERRQ(ierr); 2374 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2375 PetscFunctionReturn(0); 2376 } 2377 2378 #undef __FUNCT__ 2379 #define __FUNCT__ "DMGetType" 2380 /*@C 2381 DMGetType - Gets the DM type name (as a string) from the DM. 2382 2383 Not Collective 2384 2385 Input Parameter: 2386 . dm - The DM 2387 2388 Output Parameter: 2389 . type - The DM type name 2390 2391 Level: intermediate 2392 2393 .keywords: DM, get, type, name 2394 .seealso: DMSetType(), DMCreate() 2395 @*/ 2396 PetscErrorCode DMGetType(DM dm, const DMType *type) 2397 { 2398 PetscErrorCode ierr; 2399 2400 PetscFunctionBegin; 2401 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2402 PetscValidCharPointer(type,2); 2403 if (!DMRegisterAllCalled) { 2404 ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr); 2405 } 2406 *type = ((PetscObject)dm)->type_name; 2407 PetscFunctionReturn(0); 2408 } 2409 2410 #undef __FUNCT__ 2411 #define __FUNCT__ "DMConvert" 2412 /*@C 2413 DMConvert - Converts a DM to another DM, either of the same or different type. 
2414 2415 Collective on DM 2416 2417 Input Parameters: 2418 + dm - the DM 2419 - newtype - new DM type (use "same" for the same type) 2420 2421 Output Parameter: 2422 . M - pointer to new DM 2423 2424 Notes: 2425 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2426 the MPI communicator of the generated DM is always the same as the communicator 2427 of the input DM. 2428 2429 Level: intermediate 2430 2431 .seealso: DMCreate() 2432 @*/ 2433 PetscErrorCode DMConvert(DM dm, const DMType newtype, DM *M) 2434 { 2435 DM B; 2436 char convname[256]; 2437 PetscBool sametype, issame; 2438 PetscErrorCode ierr; 2439 2440 PetscFunctionBegin; 2441 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2442 PetscValidType(dm,1); 2443 PetscValidPointer(M,3); 2444 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2445 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2446 { 2447 PetscErrorCode (*conv)(DM, const DMType, DM *) = PETSC_NULL; 2448 2449 /* 2450 Order of precedence: 2451 1) See if a specialized converter is known to the current DM. 2452 2) See if a specialized converter is known to the desired DM class. 2453 3) See if a good general converter is registered for the desired class 2454 4) See if a good general converter is known for the current matrix. 2455 5) Use a really basic converter. 2456 */ 2457 2458 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2459 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2460 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2461 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2462 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2463 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2464 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2465 if (conv) goto foundconv; 2466 2467 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2468 ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr); 2469 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2470 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2471 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2472 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2473 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2474 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2475 ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2476 if (conv) { 2477 ierr = DMDestroy(&B);CHKERRQ(ierr); 2478 goto foundconv; 2479 } 2480 2481 #if 0 2482 /* 3) See if a good general converter is registered for the desired class */ 2483 conv = B->ops->convertfrom; 2484 ierr = DMDestroy(&B);CHKERRQ(ierr); 2485 if (conv) goto foundconv; 2486 2487 /* 4) See if a good general converter is known for the current matrix */ 2488 if (dm->ops->convert) { 2489 conv = dm->ops->convert; 2490 } 2491 if (conv) goto foundconv; 2492 #endif 2493 2494 /* 5) Use a really basic converter. 
*/ 2495 SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2496 2497 foundconv: 2498 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2499 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2500 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2501 } 2502 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2503 PetscFunctionReturn(0); 2504 } 2505 2506 /*--------------------------------------------------------------------------------------------------------------------*/ 2507 2508 #undef __FUNCT__ 2509 #define __FUNCT__ "DMRegister" 2510 /*@C 2511 DMRegister - See DMRegisterDynamic() 2512 2513 Level: advanced 2514 @*/ 2515 PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM)) 2516 { 2517 char fullname[PETSC_MAX_PATH_LEN]; 2518 PetscErrorCode ierr; 2519 2520 PetscFunctionBegin; 2521 ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr); 2522 ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr); 2523 ierr = PetscStrcat(fullname, name);CHKERRQ(ierr); 2524 ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr); 2525 PetscFunctionReturn(0); 2526 } 2527 2528 2529 /*--------------------------------------------------------------------------------------------------------------------*/ 2530 #undef __FUNCT__ 2531 #define __FUNCT__ "DMRegisterDestroy" 2532 /*@C 2533 DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic(). 
2534 2535 Not Collective 2536 2537 Level: advanced 2538 2539 .keywords: DM, register, destroy 2540 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic() 2541 @*/ 2542 PetscErrorCode DMRegisterDestroy(void) 2543 { 2544 PetscErrorCode ierr; 2545 2546 PetscFunctionBegin; 2547 ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr); 2548 DMRegisterAllCalled = PETSC_FALSE; 2549 PetscFunctionReturn(0); 2550 } 2551 2552 #if defined(PETSC_HAVE_MATLAB_ENGINE) 2553 #include <mex.h> 2554 2555 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext; 2556 2557 #undef __FUNCT__ 2558 #define __FUNCT__ "DMComputeFunction_Matlab" 2559 /* 2560 DMComputeFunction_Matlab - Calls the function that has been set with 2561 DMSetFunctionMatlab(). 2562 2563 For linear problems x is null 2564 2565 .seealso: DMSetFunction(), DMGetFunction() 2566 */ 2567 PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y) 2568 { 2569 PetscErrorCode ierr; 2570 DMMatlabContext *sctx; 2571 int nlhs = 1,nrhs = 4; 2572 mxArray *plhs[1],*prhs[4]; 2573 long long int lx = 0,ly = 0,ls = 0; 2574 2575 PetscFunctionBegin; 2576 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2577 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 2578 PetscCheckSameComm(dm,1,y,3); 2579 2580 /* call Matlab function in ctx with arguments x and y */ 2581 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2582 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2583 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2584 ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr); 2585 prhs[0] = mxCreateDoubleScalar((double)ls); 2586 prhs[1] = mxCreateDoubleScalar((double)lx); 2587 prhs[2] = mxCreateDoubleScalar((double)ly); 2588 prhs[3] = mxCreateString(sctx->funcname); 2589 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr); 2590 ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr); 2591 mxDestroyArray(prhs[0]); 2592 mxDestroyArray(prhs[1]); 2593 mxDestroyArray(prhs[2]); 2594 mxDestroyArray(prhs[3]); 
2595 mxDestroyArray(plhs[0]); 2596 PetscFunctionReturn(0); 2597 } 2598 2599 2600 #undef __FUNCT__ 2601 #define __FUNCT__ "DMSetFunctionMatlab" 2602 /* 2603 DMSetFunctionMatlab - Sets the function evaluation routine 2604 2605 */ 2606 PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func) 2607 { 2608 PetscErrorCode ierr; 2609 DMMatlabContext *sctx; 2610 2611 PetscFunctionBegin; 2612 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2613 /* currently sctx is memory bleed */ 2614 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2615 if (!sctx) { 2616 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2617 } 2618 ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr); 2619 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2620 ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr); 2621 PetscFunctionReturn(0); 2622 } 2623 2624 #undef __FUNCT__ 2625 #define __FUNCT__ "DMComputeJacobian_Matlab" 2626 /* 2627 DMComputeJacobian_Matlab - Calls the function that has been set with 2628 DMSetJacobianMatlab(). 
2629 2630 For linear problems x is null 2631 2632 .seealso: DMSetFunction(), DMGetFunction() 2633 */ 2634 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2635 { 2636 PetscErrorCode ierr; 2637 DMMatlabContext *sctx; 2638 int nlhs = 2,nrhs = 5; 2639 mxArray *plhs[2],*prhs[5]; 2640 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2641 2642 PetscFunctionBegin; 2643 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2644 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2645 2646 /* call MATLAB function in ctx with arguments x, A, and B */ 2647 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2648 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2649 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2650 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2651 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2652 prhs[0] = mxCreateDoubleScalar((double)ls); 2653 prhs[1] = mxCreateDoubleScalar((double)lx); 2654 prhs[2] = mxCreateDoubleScalar((double)lA); 2655 prhs[3] = mxCreateDoubleScalar((double)lB); 2656 prhs[4] = mxCreateString(sctx->jacname); 2657 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2658 *str = (MatStructure) mxGetScalar(plhs[0]); 2659 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2660 mxDestroyArray(prhs[0]); 2661 mxDestroyArray(prhs[1]); 2662 mxDestroyArray(prhs[2]); 2663 mxDestroyArray(prhs[3]); 2664 mxDestroyArray(prhs[4]); 2665 mxDestroyArray(plhs[0]); 2666 mxDestroyArray(plhs[1]); 2667 PetscFunctionReturn(0); 2668 } 2669 2670 2671 #undef __FUNCT__ 2672 #define __FUNCT__ "DMSetJacobianMatlab" 2673 /* 2674 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2675 2676 */ 2677 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2678 { 2679 PetscErrorCode ierr; 2680 DMMatlabContext *sctx; 2681 2682 PetscFunctionBegin; 2683 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2684 /* currently sctx is memory bleed */ 2685 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2686 if (!sctx) { 2687 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2688 } 2689 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2690 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2691 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2692 PetscFunctionReturn(0); 2693 } 2694 #endif 2695 2696 #undef __FUNCT__ 2697 #define __FUNCT__ "DMLoad" 2698 /*@C 2699 DMLoad - Loads a DM that has been stored in binary or HDF5 format 2700 with DMView(). 2701 2702 Collective on PetscViewer 2703 2704 Input Parameters: 2705 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2706 some related function before a call to DMLoad(). 2707 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2708 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2709 2710 Level: intermediate 2711 2712 Notes: 2713 Defaults to the DM DA. 2714 2715 Notes for advanced users: 2716 Most users should not need to know the details of the binary storage 2717 format, since DMLoad() and DMView() completely hide these details. 2718 But for anyone who's interested, the standard binary matrix storage 2719 format is 2720 .vb 2721 has not yet been determined 2722 .ve 2723 2724 In addition, PETSc automatically does the byte swapping for 2725 machines that store the bytes reversed, e.g. DEC alpha, freebsd, 2726 linux, Windows and the paragon; thus if you write your own binary 2727 read/write routines you have to swap the bytes; see PetscBinaryRead() 2728 and PetscBinaryWrite() to see how this may be done. 
2729 2730 Concepts: vector^loading from file 2731 2732 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2733 @*/ 2734 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2735 { 2736 PetscErrorCode ierr; 2737 2738 PetscFunctionBegin; 2739 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2740 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2741 2742 if (!((PetscObject)newdm)->type_name) { 2743 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2744 } 2745 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2746 PetscFunctionReturn(0); 2747 } 2748 2749 /******************************** FEM Support **********************************/ 2750 2751 #undef __FUNCT__ 2752 #define __FUNCT__ "DMPrintCellVector" 2753 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2754 PetscInt f; 2755 PetscErrorCode ierr; 2756 2757 PetscFunctionBegin; 2758 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2759 for(f = 0; f < len; ++f) { 2760 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2761 } 2762 PetscFunctionReturn(0); 2763 } 2764 2765 #undef __FUNCT__ 2766 #define __FUNCT__ "DMPrintCellMatrix" 2767 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2768 PetscInt f, g; 2769 PetscErrorCode ierr; 2770 2771 PetscFunctionBegin; 2772 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2773 for(f = 0; f < rows; ++f) { 2774 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2775 for(g = 0; g < cols; ++g) { 2776 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2777 } 2778 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2779 } 2780 PetscFunctionReturn(0); 2781 } 2782 2783 #undef __FUNCT__ 2784 #define __FUNCT__ "DMGetLocalFunction" 2785 /*@C 2786 DMGetLocalFunction - Get the local residual function 
from this DM 2787 2788 Not collective 2789 2790 Input Parameter: 2791 . dm - The DM 2792 2793 Output Parameter: 2794 . lf - The local residual function 2795 2796 Calling sequence of lf: 2797 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2798 2799 + snes - the SNES context 2800 . x - local vector with the state at which to evaluate residual 2801 . f - local vector to put residual in 2802 - ctx - optional user-defined function context 2803 2804 Level: intermediate 2805 2806 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2807 @*/ 2808 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 2809 { 2810 PetscFunctionBegin; 2811 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2812 if (lf) *lf = dm->lf; 2813 PetscFunctionReturn(0); 2814 } 2815 2816 #undef __FUNCT__ 2817 #define __FUNCT__ "DMSetLocalFunction" 2818 /*@C 2819 DMSetLocalFunction - Set the local residual function from this DM 2820 2821 Not collective 2822 2823 Input Parameters: 2824 + dm - The DM 2825 - lf - The local residual function 2826 2827 Calling sequence of lf: 2828 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2829 2830 + snes - the SNES context 2831 . x - local vector with the state at which to evaluate residual 2832 . f - local vector to put residual in 2833 - ctx - optional user-defined function context 2834 2835 Level: intermediate 2836 2837 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2838 @*/ 2839 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 2840 { 2841 PetscFunctionBegin; 2842 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2843 dm->lf = lf; 2844 PetscFunctionReturn(0); 2845 } 2846 2847 #undef __FUNCT__ 2848 #define __FUNCT__ "DMGetLocalJacobian" 2849 /*@C 2850 DMGetLocalJacobian - Get the local Jacobian function from this DM 2851 2852 Not collective 2853 2854 Input Parameter: 2855 . dm - The DM 2856 2857 Output Parameter: 2858 . 
lj - The local Jacobian function 2859 2860 Calling sequence of lj: 2861 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 2862 2863 + snes - the SNES context 2864 . x - local vector with the state at which to evaluate residual 2865 . J - matrix to put Jacobian in 2866 . M - matrix to use for defining Jacobian preconditioner 2867 - ctx - optional user-defined function context 2868 2869 Level: intermediate 2870 2871 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 2872 @*/ 2873 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 2874 { 2875 PetscFunctionBegin; 2876 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2877 if (lj) *lj = dm->lj; 2878 PetscFunctionReturn(0); 2879 } 2880 2881 #undef __FUNCT__ 2882 #define __FUNCT__ "DMSetLocalJacobian" 2883 /*@C 2884 DMSetLocalJacobian - Set the local Jacobian function from this DM 2885 2886 Not collective 2887 2888 Input Parameters: 2889 + dm - The DM 2890 - lj - The local Jacobian function 2891 2892 Calling sequence of lj: 2893 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 2894 2895 + snes - the SNES context 2896 . x - local vector with the state at which to evaluate residual 2897 . J - matrix to put Jacobian in 2898 . M - matrix to use for defining Jacobian preconditioner 2899 - ctx - optional user-defined function context 2900 2901 Level: intermediate 2902 2903 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 2904 @*/ 2905 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 2906 { 2907 PetscFunctionBegin; 2908 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2909 dm->lj = lj; 2910 PetscFunctionReturn(0); 2911 } 2912 2913 #undef __FUNCT__ 2914 #define __FUNCT__ "DMGetDefaultSection" 2915 /*@ 2916 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2917 2918 Input Parameter: 2919 . dm - The DM 2920 2921 Output Parameter: 2922 . 
section - The PetscSection 2923 2924 Level: intermediate 2925 2926 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2927 2928 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2929 @*/ 2930 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 2931 PetscFunctionBegin; 2932 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2933 PetscValidPointer(section, 2); 2934 *section = dm->defaultSection; 2935 PetscFunctionReturn(0); 2936 } 2937 2938 #undef __FUNCT__ 2939 #define __FUNCT__ "DMSetDefaultSection" 2940 /*@ 2941 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2942 2943 Input Parameters: 2944 + dm - The DM 2945 - section - The PetscSection 2946 2947 Level: intermediate 2948 2949 Note: Any existing Section will be destroyed 2950 2951 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2952 @*/ 2953 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 2954 PetscErrorCode ierr; 2955 2956 PetscFunctionBegin; 2957 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2958 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2959 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2960 dm->defaultSection = section; 2961 PetscFunctionReturn(0); 2962 } 2963 2964 #undef __FUNCT__ 2965 #define __FUNCT__ "DMGetDefaultGlobalSection" 2966 /*@ 2967 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2968 2969 Input Parameter: 2970 . dm - The DM 2971 2972 Output Parameter: 2973 . section - The PetscSection 2974 2975 Level: intermediate 2976 2977 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2978 2979 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2980 @*/ 2981 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 2982 PetscErrorCode ierr; 2983 2984 PetscFunctionBegin; 2985 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2986 PetscValidPointer(section, 2); 2987 if (!dm->defaultGlobalSection) { 2988 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, &dm->defaultGlobalSection);CHKERRQ(ierr); 2989 } 2990 *section = dm->defaultGlobalSection; 2991 PetscFunctionReturn(0); 2992 } 2993 2994 #undef __FUNCT__ 2995 #define __FUNCT__ "DMGetDefaultSF" 2996 /*@ 2997 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 2998 it is created from the default PetscSection layouts in the DM. 2999 3000 Input Parameter: 3001 . dm - The DM 3002 3003 Output Parameter: 3004 . sf - The PetscSF 3005 3006 Level: intermediate 3007 3008 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
3009 3010 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3011 @*/ 3012 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3013 PetscInt nroots; 3014 PetscErrorCode ierr; 3015 3016 PetscFunctionBegin; 3017 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3018 PetscValidPointer(sf, 2); 3019 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3020 if (nroots < 0) { 3021 PetscSection section, gSection; 3022 3023 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3024 if (section) { 3025 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3026 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3027 } else { 3028 *sf = PETSC_NULL; 3029 PetscFunctionReturn(0); 3030 } 3031 } 3032 *sf = dm->defaultSF; 3033 PetscFunctionReturn(0); 3034 } 3035 3036 #undef __FUNCT__ 3037 #define __FUNCT__ "DMSetDefaultSF" 3038 /*@ 3039 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3040 3041 Input Parameters: 3042 + dm - The DM 3043 - sf - The PetscSF 3044 3045 Level: intermediate 3046 3047 Note: Any previous SF is destroyed 3048 3049 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3050 @*/ 3051 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3052 PetscErrorCode ierr; 3053 3054 PetscFunctionBegin; 3055 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3056 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3057 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3058 dm->defaultSF = sf; 3059 PetscFunctionReturn(0); 3060 } 3061 3062 #undef __FUNCT__ 3063 #define __FUNCT__ "DMCreateDefaultSF" 3064 /*@C 3065 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3066 describing the data layout. 3067 3068 Input Parameters: 3069 + dm - The DM 3070 . 
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Root space size: constrained storage size of the global section */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the roots so a global offset can be mapped to its owning rank */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* Count leaves: unconstrained dofs of every point; a negative global dof is
     the encoding -(dof+1) (see the -(goff+1) decoding below) */
  for(p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt dof, cdof;

    ierr = PetscSectionGetDof(globalSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &cdof);CHKERRQ(ierr);
    nleaves += dof < 0 ? -(dof+1)-cdof : dof-cdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill leaf (local) indices and root (remote) locations; l is
     the running leaf counter across all points */
  for(p = pStart, l = 0; p < pEnd; ++p) {
    PetscInt *cind;
    PetscInt  dof, gdof, cdof, dim, off, goff, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    /* dim = number of unconstrained dofs on this point */
    dim  = dof-cdof;
    /* Leaf indices are local-section offsets, skipping constrained dofs;
       d-c compacts the surviving dofs into consecutive slots */
    for(d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* gdof < 0 marks a point whose global offset is encoded as -(goff+1);
         locate the owning rank by scanning the layout ranges */
      for(d = 0; d < dim; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        for(r = 0; r < size; ++r) {
          if ((offset >= ranges[r]) && (offset < ranges[r+1])) break;
        }
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Point stored on this rank: root index is the offset into our own range */
      for(d = 0; d < dim; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER: the SF takes ownership of the local and remote arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}