1 #include <petscsnes.h> 2 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = PETSC_NULL; 35 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 36 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 39 #endif 40 41 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 42 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 43 44 45 v->workSize = 0; 46 v->workArray = PETSC_NULL; 47 v->ltogmap = PETSC_NULL; 48 v->ltogmapb = PETSC_NULL; 49 v->bs = 1; 50 v->coloringtype = IS_COLORING_GLOBAL; 51 v->lf = PETSC_NULL; 52 v->lj = PETSC_NULL; 53 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 54 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 55 v->defaultSection = PETSC_NULL; 56 v->defaultGlobalSection = PETSC_NULL; 57 { 58 PetscInt i; 59 for(i = 0; i < 10; ++i) { 60 v->nullspaceConstructors[i] = PETSC_NULL; 61 } 62 } 63 64 *dm = v; 65 PetscFunctionReturn(0); 66 } 67 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "DMSetVecType" 71 /*@C 72 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 73 74 Logically 
Collective on DMDA 75 76 Input Parameter: 77 + da - initial distributed array 78 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 79 80 Options Database: 81 . -dm_vec_type ctype 82 83 Level: intermediate 84 85 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 86 @*/ 87 PetscErrorCode DMSetVecType(DM da,const VecType ctype) 88 { 89 PetscErrorCode ierr; 90 91 PetscFunctionBegin; 92 PetscValidHeaderSpecific(da,DM_CLASSID,1); 93 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 94 ierr = PetscStrallocpy(ctype,&da->vectype);CHKERRQ(ierr); 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "DMSetMatType" 100 /*@C 101 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 102 103 Logically Collective on DM 104 105 Input Parameter: 106 + dm - the DM context 107 . ctype - the matrix type 108 109 Options Database: 110 . -dm_mat_type ctype 111 112 Level: intermediate 113 114 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 115 @*/ 116 PetscErrorCode DMSetMatType(DM dm,const MatType ctype) 117 { 118 PetscErrorCode ierr; 119 PetscFunctionBegin; 120 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 121 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 122 ierr = PetscStrallocpy(ctype,&dm->mattype);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 126 #undef __FUNCT__ 127 #define __FUNCT__ "DMSetOptionsPrefix" 128 /*@C 129 DMSetOptionsPrefix - Sets the prefix used for searching for all 130 DMDA options in the database. 131 132 Logically Collective on DMDA 133 134 Input Parameter: 135 + da - the DMDA context 136 - prefix - the prefix to prepend to all option names 137 138 Notes: 139 A hyphen (-) must NOT be given at the beginning of the prefix name. 140 The first character of all runtime options is AUTOMATICALLY the hyphen. 
141 142 Level: advanced 143 144 .keywords: DMDA, set, options, prefix, database 145 146 .seealso: DMSetFromOptions() 147 @*/ 148 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 149 { 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 154 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 155 PetscFunctionReturn(0); 156 } 157 158 #undef __FUNCT__ 159 #define __FUNCT__ "DMDestroy" 160 /*@ 161 DMDestroy - Destroys a vector packer or DMDA. 162 163 Collective on DM 164 165 Input Parameter: 166 . dm - the DM object to destroy 167 168 Level: developer 169 170 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 171 172 @*/ 173 PetscErrorCode DMDestroy(DM *dm) 174 { 175 PetscInt i, cnt = 0; 176 DMNamedVecLink nlink,nnext; 177 PetscErrorCode ierr; 178 179 PetscFunctionBegin; 180 if (!*dm) PetscFunctionReturn(0); 181 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 182 183 /* count all the circular references of DM and its contained Vecs */ 184 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 185 if ((*dm)->localin[i]) {cnt++;} 186 if ((*dm)->globalin[i]) {cnt++;} 187 } 188 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 189 if ((*dm)->x) { 190 PetscObject obj; 191 ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr); 192 if (obj == (PetscObject)*dm) cnt++; 193 } 194 195 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 196 /* 197 Need this test because the dm references the vectors that 198 reference the dm, so destroying the dm calls destroy on the 199 vectors that cause another destroy on the dm 200 */ 201 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 202 ((PetscObject) (*dm))->refct = 0; 203 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 204 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with 
DMGetLocalVector()"); 205 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 206 } 207 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 208 nnext = nlink->next; 209 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 210 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 211 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 212 ierr = PetscFree(nlink);CHKERRQ(ierr); 213 } 214 (*dm)->namedglobal = PETSC_NULL; 215 216 /* Destroy the list of hooks */ 217 { 218 DMCoarsenHookLink link,next; 219 for (link=(*dm)->coarsenhook; link; link=next) { 220 next = link->next; 221 ierr = PetscFree(link);CHKERRQ(ierr); 222 } 223 (*dm)->coarsenhook = PETSC_NULL; 224 } 225 { 226 DMRefineHookLink link,next; 227 for (link=(*dm)->refinehook; link; link=next) { 228 next = link->next; 229 ierr = PetscFree(link);CHKERRQ(ierr); 230 } 231 (*dm)->refinehook = PETSC_NULL; 232 } 233 234 if ((*dm)->ctx && (*dm)->ctxdestroy) { 235 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 236 } 237 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 238 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 239 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 240 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 241 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 242 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 243 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 244 ierr = PetscFree((*dm)->workArray);CHKERRQ(ierr); 245 246 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 247 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 248 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 249 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 250 /* if memory was published with AMS then destroy it */ 251 ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 252 253 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 
254 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 255 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 256 PetscFunctionReturn(0); 257 } 258 259 #undef __FUNCT__ 260 #define __FUNCT__ "DMSetUp" 261 /*@ 262 DMSetUp - sets up the data structures inside a DM object 263 264 Collective on DM 265 266 Input Parameter: 267 . dm - the DM object to setup 268 269 Level: developer 270 271 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 272 273 @*/ 274 PetscErrorCode DMSetUp(DM dm) 275 { 276 PetscErrorCode ierr; 277 278 PetscFunctionBegin; 279 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 280 if (dm->setupcalled) PetscFunctionReturn(0); 281 if (dm->ops->setup) { 282 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 283 } 284 dm->setupcalled = PETSC_TRUE; 285 PetscFunctionReturn(0); 286 } 287 288 #undef __FUNCT__ 289 #define __FUNCT__ "DMSetFromOptions" 290 /*@ 291 DMSetFromOptions - sets parameters in a DM from the options database 292 293 Collective on DM 294 295 Input Parameter: 296 . dm - the DM object to set options for 297 298 Options Database: 299 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 300 . -dm_vec_type <type> type of vector to create inside DM 301 . 
-dm_mat_type <type> type of matrix to create inside DM 302 - -dm_coloring_type <global or ghosted> 303 304 Level: developer 305 306 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 307 308 @*/ 309 PetscErrorCode DMSetFromOptions(DM dm) 310 { 311 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 312 PetscErrorCode ierr; 313 char typeName[256] = MATAIJ; 314 315 PetscFunctionBegin; 316 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 317 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 318 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 319 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 320 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 321 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 322 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 323 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 324 if (flg) { 325 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 326 } 327 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof typeName,&flg);CHKERRQ(ierr); 328 if (flg) { 329 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 330 } 331 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 332 if 
(dm->ops->setfromoptions) { 333 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 334 } 335 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 336 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 337 ierr = PetscOptionsEnd();CHKERRQ(ierr); 338 if (flg1) { 339 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 340 } 341 if (flg2) { 342 PetscViewer viewer; 343 344 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 345 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 346 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 347 ierr = DMView(dm, viewer);CHKERRQ(ierr); 348 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 349 } 350 if (flg3) { 351 PetscViewer viewer; 352 353 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 354 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 355 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 356 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 357 ierr = DMView(dm, viewer);CHKERRQ(ierr); 358 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 359 } 360 if (flg4) { 361 PetscViewer viewer; 362 363 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 364 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 365 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 366 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 367 ierr = DMView(dm, viewer);CHKERRQ(ierr); 368 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 369 } 370 PetscFunctionReturn(0); 371 } 372 373 #undef __FUNCT__ 374 #define __FUNCT__ "DMView" 375 /*@C 376 DMView - Views a vector packer or DMDA. 
377 378 Collective on DM 379 380 Input Parameter: 381 + dm - the DM object to view 382 - v - the viewer 383 384 Level: developer 385 386 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 387 388 @*/ 389 PetscErrorCode DMView(DM dm,PetscViewer v) 390 { 391 PetscErrorCode ierr; 392 393 PetscFunctionBegin; 394 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 395 if (!v) { 396 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 397 } 398 if (dm->ops->view) { 399 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 400 } 401 PetscFunctionReturn(0); 402 } 403 404 #undef __FUNCT__ 405 #define __FUNCT__ "DMCreateGlobalVector" 406 /*@ 407 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 408 409 Collective on DM 410 411 Input Parameter: 412 . dm - the DM object 413 414 Output Parameter: 415 . vec - the global vector 416 417 Level: beginner 418 419 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 420 421 @*/ 422 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 423 { 424 PetscErrorCode ierr; 425 426 PetscFunctionBegin; 427 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 428 if (dm->defaultSection) { 429 PetscSection gSection; 430 PetscInt localSize; 431 432 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 433 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 434 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 435 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 436 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 437 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 438 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 439 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 440 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 441 /* ierr = 
VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 442 } else { 443 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 444 } 445 PetscFunctionReturn(0); 446 } 447 448 #undef __FUNCT__ 449 #define __FUNCT__ "DMCreateLocalVector" 450 /*@ 451 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 452 453 Not Collective 454 455 Input Parameter: 456 . dm - the DM object 457 458 Output Parameter: 459 . vec - the local vector 460 461 Level: beginner 462 463 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 464 465 @*/ 466 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 467 { 468 PetscErrorCode ierr; 469 470 PetscFunctionBegin; 471 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 472 if (dm->defaultSection) { 473 PetscInt localSize; 474 475 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 476 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 477 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 478 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 479 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 480 } else { 481 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 482 } 483 PetscFunctionReturn(0); 484 } 485 486 #undef __FUNCT__ 487 #define __FUNCT__ "DMGetLocalToGlobalMapping" 488 /*@ 489 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 490 491 Collective on DM 492 493 Input Parameter: 494 . dm - the DM that provides the mapping 495 496 Output Parameter: 497 . ltog - the mapping 498 499 Level: intermediate 500 501 Notes: 502 This mapping can then be used by VecSetLocalToGlobalMapping() or 503 MatSetLocalToGlobalMapping(). 
504 505 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 506 @*/ 507 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 508 { 509 PetscErrorCode ierr; 510 511 PetscFunctionBegin; 512 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 513 PetscValidPointer(ltog,2); 514 if (!dm->ltogmap) { 515 PetscSection section, sectionGlobal; 516 517 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 518 if (section) { 519 PetscInt *ltog; 520 PetscInt pStart, pEnd, size, p, l; 521 522 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 523 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 524 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 525 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 526 for(p = pStart, l = 0; p < pEnd; ++p) { 527 PetscInt dof, off, c; 528 529 /* Should probably use constrained dofs */ 530 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 531 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 532 for(c = 0; c < dof; ++c, ++l) { 533 ltog[l] = off+c; 534 } 535 } 536 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 537 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 538 } else { 539 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 540 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 541 } 542 } 543 *ltog = dm->ltogmap; 544 PetscFunctionReturn(0); 545 } 546 547 #undef __FUNCT__ 548 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 549 /*@ 550 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 551 552 Collective on DM 553 554 Input Parameter: 555 . da - the distributed array that provides the mapping 556 557 Output Parameter: 558 . 
ltog - the block mapping 559 560 Level: intermediate 561 562 Notes: 563 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 564 MatSetLocalToGlobalMappingBlock(). 565 566 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 567 @*/ 568 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 569 { 570 PetscErrorCode ierr; 571 572 PetscFunctionBegin; 573 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 574 PetscValidPointer(ltog,2); 575 if (!dm->ltogmapb) { 576 PetscInt bs; 577 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 578 if (bs > 1) { 579 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 580 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 581 } else { 582 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 583 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 584 } 585 } 586 *ltog = dm->ltogmapb; 587 PetscFunctionReturn(0); 588 } 589 590 #undef __FUNCT__ 591 #define __FUNCT__ "DMGetBlockSize" 592 /*@ 593 DMGetBlockSize - Gets the inherent block size associated with a DM 594 595 Not Collective 596 597 Input Parameter: 598 . dm - the DM with block structure 599 600 Output Parameter: 601 . 
bs - the block size, 1 implies no exploitable block structure 602 603 Level: intermediate 604 605 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 606 @*/ 607 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 608 { 609 PetscFunctionBegin; 610 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 611 PetscValidPointer(bs,2); 612 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 613 *bs = dm->bs; 614 PetscFunctionReturn(0); 615 } 616 617 #undef __FUNCT__ 618 #define __FUNCT__ "DMCreateInterpolation" 619 /*@ 620 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 621 622 Collective on DM 623 624 Input Parameter: 625 + dm1 - the DM object 626 - dm2 - the second, finer DM object 627 628 Output Parameter: 629 + mat - the interpolation 630 - vec - the scaling (optional) 631 632 Level: developer 633 634 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 635 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 636 637 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMDAGetCoordinateDA()) to interpolate the mesh coordinate vectors 638 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
639 640 641 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 642 643 @*/ 644 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 645 { 646 PetscErrorCode ierr; 647 648 PetscFunctionBegin; 649 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 650 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 651 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 652 PetscFunctionReturn(0); 653 } 654 655 #undef __FUNCT__ 656 #define __FUNCT__ "DMCreateInjection" 657 /*@ 658 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 659 660 Collective on DM 661 662 Input Parameter: 663 + dm1 - the DM object 664 - dm2 - the second, finer DM object 665 666 Output Parameter: 667 . ctx - the injection 668 669 Level: developer 670 671 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 672 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 673 674 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 675 676 @*/ 677 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 678 { 679 PetscErrorCode ierr; 680 681 PetscFunctionBegin; 682 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 683 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 684 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 685 PetscFunctionReturn(0); 686 } 687 688 #undef __FUNCT__ 689 #define __FUNCT__ "DMCreateColoring" 690 /*@C 691 DMCreateColoring - Gets coloring for a DMDA or DMComposite 692 693 Collective on DM 694 695 Input Parameter: 696 + dm - the DM object 697 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 698 - matype - either MATAIJ or MATBAIJ 699 700 Output Parameter: 701 . 
coloring - the coloring 702 703 Level: developer 704 705 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 706 707 @*/ 708 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 709 { 710 PetscErrorCode ierr; 711 712 PetscFunctionBegin; 713 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 714 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 715 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 716 PetscFunctionReturn(0); 717 } 718 719 #undef __FUNCT__ 720 #define __FUNCT__ "DMCreateMatrix" 721 /*@C 722 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 723 724 Collective on DM 725 726 Input Parameter: 727 + dm - the DM object 728 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 729 any type which inherits from one of these (such as MATAIJ) 730 731 Output Parameter: 732 . mat - the empty Jacobian 733 734 Level: beginner 735 736 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 737 do not need to do it yourself. 738 739 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 740 the nonzero pattern call DMDASetMatPreallocateOnly() 741 742 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 743 internally by PETSc. 744 745 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 746 the indices for the global numbering for DMDAs which is complicated. 
747 748 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 749 750 @*/ 751 PetscErrorCode DMCreateMatrix(DM dm,const MatType mtype,Mat *mat) 752 { 753 PetscErrorCode ierr; 754 755 PetscFunctionBegin; 756 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 757 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 758 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 759 #endif 760 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 761 PetscValidPointer(mat,3); 762 if (dm->mattype) { 763 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 764 } else { 765 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 766 } 767 PetscFunctionReturn(0); 768 } 769 770 #undef __FUNCT__ 771 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 772 /*@ 773 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 774 preallocated but the nonzero structure and zero values will not be set. 775 776 Logically Collective on DMDA 777 778 Input Parameter: 779 + dm - the DM 780 - only - PETSC_TRUE if only want preallocation 781 782 Level: developer 783 .seealso DMCreateMatrix() 784 @*/ 785 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 786 { 787 PetscFunctionBegin; 788 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 789 dm->prealloc_only = only; 790 PetscFunctionReturn(0); 791 } 792 793 #undef __FUNCT__ 794 #define __FUNCT__ "DMGetWorkArray" 795 /*@C 796 DMGetWorkArray - Gets a work array guaranteed to be at least the input size 797 798 Not Collective 799 800 Input Parameters: 801 + dm - the DM object 802 - size - The minium size 803 804 Output Parameter: 805 . 
array - the work array 806 807 Level: developer 808 809 .seealso DMDestroy(), DMCreate() 810 @*/ 811 PetscErrorCode DMGetWorkArray(DM dm,PetscInt size,PetscScalar **array) 812 { 813 PetscErrorCode ierr; 814 815 PetscFunctionBegin; 816 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 817 PetscValidPointer(array,3); 818 if (size > dm->workSize) { 819 dm->workSize = size; 820 ierr = PetscFree(dm->workArray);CHKERRQ(ierr); 821 ierr = PetscMalloc(dm->workSize * sizeof(PetscScalar), &dm->workArray);CHKERRQ(ierr); 822 } 823 *array = dm->workArray; 824 PetscFunctionReturn(0); 825 } 826 827 828 #undef __FUNCT__ 829 #define __FUNCT__ "DMSetNullSpaceConstructor" 830 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 831 { 832 PetscFunctionBegin; 833 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 834 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 835 dm->nullspaceConstructors[field] = nullsp; 836 PetscFunctionReturn(0); 837 } 838 839 #undef __FUNCT__ 840 #define __FUNCT__ "DMCreateFieldIS" 841 /*@C 842 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 843 844 Not collective 845 846 Input Parameter: 847 . dm - the DM object 848 849 Output Parameters: 850 + numFields - The number of fields (or PETSC_NULL if not requested) 851 . fieldNames - The name for each field (or PETSC_NULL if not requested) 852 - fields - The global indices for each field (or PETSC_NULL if not requested) 853 854 Level: intermediate 855 856 Notes: 857 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 858 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 859 PetscFree(). 
860 861 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 862 @*/ 863 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 864 { 865 PetscSection section, sectionGlobal; 866 PetscErrorCode ierr; 867 868 PetscFunctionBegin; 869 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 870 if (numFields) { 871 PetscValidPointer(numFields,2); 872 *numFields = 0; 873 } 874 if (fieldNames) { 875 PetscValidPointer(fieldNames,3); 876 *fieldNames = PETSC_NULL; 877 } 878 if (fields) { 879 PetscValidPointer(fields,4); 880 *fields = PETSC_NULL; 881 } 882 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 883 if (section) { 884 PetscInt *fieldSizes, **fieldIndices; 885 PetscInt nF, f, pStart, pEnd, p; 886 887 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 888 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 889 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 890 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 891 for(f = 0; f < nF; ++f) { 892 fieldSizes[f] = 0; 893 } 894 for(p = pStart; p < pEnd; ++p) { 895 PetscInt gdof; 896 897 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 898 if (gdof > 0) { 899 for(f = 0; f < nF; ++f) { 900 PetscInt fdof, fcdof; 901 902 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 903 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 904 fieldSizes[f] += fdof-fcdof; 905 } 906 } 907 } 908 for(f = 0; f < nF; ++f) { 909 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 910 fieldSizes[f] = 0; 911 } 912 for(p = pStart; p < pEnd; ++p) { 913 PetscInt gdof, goff; 914 915 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 916 if (gdof > 0) { 917 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 918 for(f = 0; f < nF; ++f) { 919 PetscInt fdof, fcdof, fc; 920 921 
ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 922 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 923 for(fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 924 fieldIndices[f][fieldSizes[f]] = goff++; 925 } 926 } 927 } 928 } 929 if (numFields) {*numFields = nF;} 930 if (fieldNames) { 931 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 932 for(f = 0; f < nF; ++f) { 933 const char *fieldName; 934 935 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 936 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 937 } 938 } 939 if (fields) { 940 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 941 for(f = 0; f < nF; ++f) { 942 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 943 } 944 } 945 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 946 } else { 947 if(dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 948 } 949 PetscFunctionReturn(0); 950 } 951 952 #undef __FUNCT__ 953 #define __FUNCT__ "DMCreateFieldDecompositionDM" 954 /*@C 955 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 956 957 Not Collective 958 959 Input Parameters: 960 + dm - the DM object 961 - name - the name of the field decomposition 962 963 Output Parameter: 964 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 965 966 Level: advanced 967 968 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 969 @*/ 970 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 971 { 972 PetscErrorCode ierr; 973 974 PetscFunctionBegin; 975 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 976 PetscValidCharPointer(name,2); 977 PetscValidPointer(ddm,3); 978 *ddm = PETSC_NULL; 979 if(dm->ops->createfielddecompositiondm) { 980 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 981 } 982 PetscFunctionReturn(0); 983 } 984 985 #undef __FUNCT__ 986 #define __FUNCT__ "DMCreateFieldDecomposition" 987 /*@C 988 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 989 corresponding to different fields: each IS contains the global indices of the dofs of the 990 corresponding field. The optional list of DMs define the DM for each subproblem. 991 Generalizes DMCreateFieldIS(). 992 993 Not collective 994 995 Input Parameter: 996 . dm - the DM object 997 998 Output Parameters: 999 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1000 . namelist - The name for each field (or PETSC_NULL if not requested) 1001 . islist - The global indices for each field (or PETSC_NULL if not requested) 1002 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1003 1004 Level: intermediate 1005 1006 Notes: 1007 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1008 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1009 and all of the arrays should be freed with PetscFree(). 
1010 1011 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1012 @*/ 1013 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1014 { 1015 PetscErrorCode ierr; 1016 1017 PetscFunctionBegin; 1018 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1019 if (len) {PetscValidPointer(len,2); *len = 0;} 1020 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1021 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1022 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1023 if (!dm->ops->createfielddecomposition) { 1024 PetscSection section; 1025 PetscInt numFields, f; 1026 1027 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1028 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1029 if (section && numFields && dm->ops->createsubdm) { 1030 *len = numFields; 1031 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1032 for(f = 0; f < numFields; ++f) { 1033 const char *fieldName; 1034 1035 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1036 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1037 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1038 } 1039 } else { 1040 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1041 /* By default there are no DMs associated with subproblems. */ 1042 if(dmlist) *dmlist = PETSC_NULL; 1043 } 1044 } 1045 else { 1046 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1047 } 1048 PetscFunctionReturn(0); 1049 } 1050 1051 #undef __FUNCT__ 1052 #define __FUNCT__ "DMCreateSubDM" 1053 /*@C 1054 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1055 The fields are defined by DMCreateFieldIS(). 
1056 1057 Not collective 1058 1059 Input Parameters: 1060 + dm - the DM object 1061 . numFields - number of fields in this subproblem 1062 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1063 1064 Output Parameters: 1065 . is - The global indices for the subproblem 1066 - dm - The DM for the subproblem 1067 1068 Level: intermediate 1069 1070 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1071 @*/ 1072 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1073 { 1074 PetscErrorCode ierr; 1075 1076 PetscFunctionBegin; 1077 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1078 PetscValidPointer(fields,3); 1079 if (is) {PetscValidPointer(is,4);} 1080 if (subdm) {PetscValidPointer(subdm,5);} 1081 if (dm->ops->createsubdm) { 1082 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1083 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1084 PetscFunctionReturn(0); 1085 } 1086 1087 #undef __FUNCT__ 1088 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1089 /*@C 1090 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1091 1092 Not Collective 1093 1094 Input Parameters: 1095 + dm - the DM object 1096 - name - the name of the subdomain decomposition 1097 1098 Output Parameter: 1099 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1100 1101 Level: advanced 1102 1103 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1104 @*/ 1105 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1106 { 1107 PetscErrorCode ierr; 1108 1109 PetscFunctionBegin; 1110 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1111 PetscValidCharPointer(name,2); 1112 PetscValidPointer(ddm,3); 1113 *ddm = PETSC_NULL; 1114 if(dm->ops->createdomaindecompositiondm) { 1115 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1116 } 1117 PetscFunctionReturn(0); 1118 } 1119 1120 #undef __FUNCT__ 1121 #define __FUNCT__ "DMCreateDomainDecomposition" 1122 /*@C 1123 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1124 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1125 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1126 define a nonoverlapping covering, while outer subdomains can overlap. 1127 The optional list of DMs define the DM for each subproblem. 1128 1129 Not collective 1130 1131 Input Parameter: 1132 . dm - the DM object 1133 1134 Output Parameters: 1135 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1136 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1137 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1138 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1139 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1140 1141 Level: intermediate 1142 1143 Notes: 1144 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1145 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1146 and all of the arrays should be freed with PetscFree(). 1147 1148 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1149 @*/ 1150 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1151 { 1152 PetscErrorCode ierr; 1153 1154 PetscFunctionBegin; 1155 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1156 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1157 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1158 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1159 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1160 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1161 if(dm->ops->createdomaindecomposition) { 1162 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1163 } 1164 PetscFunctionReturn(0); 1165 } 1166 1167 #undef __FUNCT__ 1168 #define __FUNCT__ "DMRefine" 1169 /*@ 1170 DMRefine - Refines a DM object 1171 1172 Collective on DM 1173 1174 Input Parameter: 1175 + dm - the DM object 1176 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1177 1178 Output Parameter: 1179 . 
dmf - the refined DM, or PETSC_NULL 1180 1181 Note: If no refinement was done, the return value is PETSC_NULL 1182 1183 Level: developer 1184 1185 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1186 @*/ 1187 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1188 { 1189 PetscErrorCode ierr; 1190 DMRefineHookLink link; 1191 1192 PetscFunctionBegin; 1193 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1194 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1195 if (*dmf) { 1196 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1197 (*dmf)->ops->initialguess = dm->ops->initialguess; 1198 (*dmf)->ops->function = dm->ops->function; 1199 (*dmf)->ops->functionj = dm->ops->functionj; 1200 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1201 (*dmf)->ops->jacobian = dm->ops->jacobian; 1202 } 1203 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1204 (*dmf)->ctx = dm->ctx; 1205 (*dmf)->leveldown = dm->leveldown; 1206 (*dmf)->levelup = dm->levelup + 1; 1207 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1208 for (link=dm->refinehook; link; link=link->next) { 1209 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1210 } 1211 } 1212 PetscFunctionReturn(0); 1213 } 1214 1215 #undef __FUNCT__ 1216 #define __FUNCT__ "DMRefineHookAdd" 1217 /*@ 1218 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1219 1220 Logically Collective 1221 1222 Input Arguments: 1223 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1224 . refinehook - function to run when setting up a coarser level 1225 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1226 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1227 1228 Calling sequence of refinehook: 1229 $ refinehook(DM coarse,DM fine,void *ctx); 1230 1231 + coarse - coarse level DM 1232 . fine - fine level DM to interpolate problem to 1233 - ctx - optional user-defined function context 1234 1235 Calling sequence for interphook: 1236 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1237 1238 + coarse - coarse level DM 1239 . interp - matrix interpolating a coarse-level solution to the finer grid 1240 . fine - fine level DM to update 1241 - ctx - optional user-defined function context 1242 1243 Level: advanced 1244 1245 Notes: 1246 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1247 1248 If this function is called multiple times, the hooks will be run in the order they are added. 1249 1250 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1251 @*/ 1252 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1253 { 1254 PetscErrorCode ierr; 1255 DMRefineHookLink link,*p; 1256 1257 PetscFunctionBegin; 1258 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1259 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1260 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1261 link->refinehook = refinehook; 1262 link->interphook = interphook; 1263 link->ctx = ctx; 1264 link->next = PETSC_NULL; 1265 *p = link; 1266 PetscFunctionReturn(0); 1267 } 1268 1269 #undef __FUNCT__ 1270 #define __FUNCT__ "DMInterpolate" 1271 /*@ 1272 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1273 1274 Collective if any 
hooks are 1275 1276 Input Arguments: 1277 + coarse - coarser DM to use as a base 1278 . restrct - interpolation matrix, apply using MatInterpolate() 1279 - fine - finer DM to update 1280 1281 Level: developer 1282 1283 .seealso: DMRefineHookAdd(), MatInterpolate() 1284 @*/ 1285 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1286 { 1287 PetscErrorCode ierr; 1288 DMRefineHookLink link; 1289 1290 PetscFunctionBegin; 1291 for (link=fine->refinehook; link; link=link->next) { 1292 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1293 } 1294 PetscFunctionReturn(0); 1295 } 1296 1297 #undef __FUNCT__ 1298 #define __FUNCT__ "DMGetRefineLevel" 1299 /*@ 1300 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1301 1302 Not Collective 1303 1304 Input Parameter: 1305 . dm - the DM object 1306 1307 Output Parameter: 1308 . level - number of refinements 1309 1310 Level: developer 1311 1312 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1313 1314 @*/ 1315 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1316 { 1317 PetscFunctionBegin; 1318 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1319 *level = dm->levelup; 1320 PetscFunctionReturn(0); 1321 } 1322 1323 #undef __FUNCT__ 1324 #define __FUNCT__ "DMGlobalToLocalBegin" 1325 /*@ 1326 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1327 1328 Neighbor-wise Collective on DM 1329 1330 Input Parameters: 1331 + dm - the DM object 1332 . g - the global vector 1333 . 
mode - INSERT_VALUES or ADD_VALUES 1334 - l - the local vector 1335 1336 1337 Level: beginner 1338 1339 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1340 1341 @*/ 1342 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1343 { 1344 PetscSF sf; 1345 PetscErrorCode ierr; 1346 1347 PetscFunctionBegin; 1348 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1349 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1350 if (sf) { 1351 PetscScalar *lArray, *gArray; 1352 1353 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1354 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1355 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1356 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1357 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1358 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1359 } else { 1360 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1361 } 1362 PetscFunctionReturn(0); 1363 } 1364 1365 #undef __FUNCT__ 1366 #define __FUNCT__ "DMGlobalToLocalEnd" 1367 /*@ 1368 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1369 1370 Neighbor-wise Collective on DM 1371 1372 Input Parameters: 1373 + dm - the DM object 1374 . g - the global vector 1375 . 
mode - INSERT_VALUES or ADD_VALUES 1376 - l - the local vector 1377 1378 1379 Level: beginner 1380 1381 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1382 1383 @*/ 1384 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1385 { 1386 PetscSF sf; 1387 PetscErrorCode ierr; 1388 PetscScalar *lArray, *gArray; 1389 1390 PetscFunctionBegin; 1391 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1392 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1393 if (sf) { 1394 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1395 1396 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1397 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1398 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1399 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1400 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1401 } else { 1402 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1403 } 1404 PetscFunctionReturn(0); 1405 } 1406 1407 #undef __FUNCT__ 1408 #define __FUNCT__ "DMLocalToGlobalBegin" 1409 /*@ 1410 DMLocalToGlobalBegin - updates global vectors from local vectors 1411 1412 Neighbor-wise Collective on DM 1413 1414 Input Parameters: 1415 + dm - the DM object 1416 . l - the local vector 1417 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1418 base point. 1419 - - the global vector 1420 1421 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1422 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1423 global array to the final global array with VecAXPY(). 1424 1425 Level: beginner 1426 1427 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1428 1429 @*/ 1430 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1431 { 1432 PetscSF sf; 1433 PetscErrorCode ierr; 1434 1435 PetscFunctionBegin; 1436 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1437 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1438 if (sf) { 1439 MPI_Op op; 1440 PetscScalar *lArray, *gArray; 1441 1442 switch(mode) { 1443 case INSERT_VALUES: 1444 case INSERT_ALL_VALUES: 1445 #if defined(PETSC_HAVE_MPI_REPLACE) 1446 op = MPI_REPLACE; break; 1447 #else 1448 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1449 #endif 1450 case ADD_VALUES: 1451 case ADD_ALL_VALUES: 1452 op = MPI_SUM; break; 1453 default: 1454 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1455 } 1456 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1457 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1458 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1459 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1460 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1461 } else { 1462 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1463 } 1464 PetscFunctionReturn(0); 1465 } 1466 1467 #undef __FUNCT__ 1468 #define __FUNCT__ "DMLocalToGlobalEnd" 1469 /*@ 1470 DMLocalToGlobalEnd - updates global vectors from local vectors 1471 1472 Neighbor-wise Collective on DM 1473 1474 Input Parameters: 1475 + dm - the DM object 1476 . l - the local vector 1477 . mode - INSERT_VALUES or ADD_VALUES 1478 - g - the global vector 1479 1480 1481 Level: beginner 1482 1483 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1484 1485 @*/ 1486 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1487 { 1488 PetscSF sf; 1489 PetscErrorCode ierr; 1490 1491 PetscFunctionBegin; 1492 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1493 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1494 if (sf) { 1495 MPI_Op op; 1496 PetscScalar *lArray, *gArray; 1497 1498 switch(mode) { 1499 case INSERT_VALUES: 1500 case INSERT_ALL_VALUES: 1501 #if defined(PETSC_HAVE_MPI_REPLACE) 1502 op = MPI_REPLACE; break; 1503 #else 1504 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1505 #endif 1506 case ADD_VALUES: 1507 case ADD_ALL_VALUES: 1508 op = MPI_SUM; break; 1509 default: 1510 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1511 } 1512 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1513 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1514 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1515 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1516 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1517 } else { 1518 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1519 } 1520 PetscFunctionReturn(0); 1521 } 1522 1523 #undef __FUNCT__ 1524 #define __FUNCT__ "DMComputeJacobianDefault" 1525 /*@ 1526 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1527 1528 Collective on DM 1529 1530 Input Parameter: 1531 + dm - the DM object 1532 . x - location to compute Jacobian at; may be ignored for linear problems 1533 . A - matrix that defines the operator for the linear solve 1534 - B - the matrix used to construct the preconditioner 1535 1536 Level: developer 1537 1538 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1539 DMSetFunction() 1540 1541 @*/ 1542 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1543 { 1544 PetscErrorCode ierr; 1545 1546 PetscFunctionBegin; 1547 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1548 *stflag = SAME_NONZERO_PATTERN; 1549 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1550 if (A != B) { 1551 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1552 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1553 } 1554 PetscFunctionReturn(0); 1555 } 1556 1557 #undef __FUNCT__ 1558 #define __FUNCT__ "DMCoarsen" 1559 /*@ 1560 DMCoarsen - Coarsens a DM object 1561 1562 Collective on DM 1563 1564 Input Parameter: 1565 + dm - the DM object 1566 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1567 1568 Output Parameter: 1569 . 
dmc - the coarsened DM 1570 1571 Level: developer 1572 1573 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1574 1575 @*/ 1576 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1577 { 1578 PetscErrorCode ierr; 1579 DMCoarsenHookLink link; 1580 1581 PetscFunctionBegin; 1582 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1583 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1584 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1585 (*dmc)->ops->initialguess = dm->ops->initialguess; 1586 (*dmc)->ops->function = dm->ops->function; 1587 (*dmc)->ops->functionj = dm->ops->functionj; 1588 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1589 (*dmc)->ops->jacobian = dm->ops->jacobian; 1590 } 1591 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1592 (*dmc)->ctx = dm->ctx; 1593 (*dmc)->levelup = dm->levelup; 1594 (*dmc)->leveldown = dm->leveldown + 1; 1595 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1596 for (link=dm->coarsenhook; link; link=link->next) { 1597 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1598 } 1599 PetscFunctionReturn(0); 1600 } 1601 1602 #undef __FUNCT__ 1603 #define __FUNCT__ "DMCoarsenHookAdd" 1604 /*@ 1605 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1606 1607 Logically Collective 1608 1609 Input Arguments: 1610 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1611 . coarsenhook - function to run when setting up a coarser level 1612 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1613 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1614 1615 Calling sequence of coarsenhook: 1616 $ coarsenhook(DM fine,DM coarse,void *ctx); 1617 1618 + fine - fine level DM 1619 . 
coarse - coarse level DM to restrict problem to 1620 - ctx - optional user-defined function context 1621 1622 Calling sequence for restricthook: 1623 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1624 1625 + fine - fine level DM 1626 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1627 . rscale - scaling vector for restriction 1628 . inject - matrix restricting by injection 1629 . coarse - coarse level DM to update 1630 - ctx - optional user-defined function context 1631 1632 Level: advanced 1633 1634 Notes: 1635 This function is only needed if auxiliary data needs to be set up on coarse grids. 1636 1637 If this function is called multiple times, the hooks will be run in the order they are added. 1638 1639 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1640 extract the finest level information from its context (instead of from the SNES). 1641 1642 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1643 @*/ 1644 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1645 { 1646 PetscErrorCode ierr; 1647 DMCoarsenHookLink link,*p; 1648 1649 PetscFunctionBegin; 1650 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1651 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1652 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1653 link->coarsenhook = coarsenhook; 1654 link->restricthook = restricthook; 1655 link->ctx = ctx; 1656 link->next = PETSC_NULL; 1657 *p = link; 1658 PetscFunctionReturn(0); 1659 } 1660 1661 #undef __FUNCT__ 1662 #define __FUNCT__ "DMRestrict" 1663 /*@ 1664 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1665 1666 Collective 
if any hooks are 1667 1668 Input Arguments: 1669 + fine - finer DM to use as a base 1670 . restrct - restriction matrix, apply using MatRestrict() 1671 . inject - injection matrix, also use MatRestrict() 1672 - coarse - coarer DM to update 1673 1674 Level: developer 1675 1676 .seealso: DMCoarsenHookAdd(), MatRestrict() 1677 @*/ 1678 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1679 { 1680 PetscErrorCode ierr; 1681 DMCoarsenHookLink link; 1682 1683 PetscFunctionBegin; 1684 for (link=fine->coarsenhook; link; link=link->next) { 1685 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1686 } 1687 PetscFunctionReturn(0); 1688 } 1689 1690 #undef __FUNCT__ 1691 #define __FUNCT__ "DMGetCoarsenLevel" 1692 /*@ 1693 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1694 1695 Not Collective 1696 1697 Input Parameter: 1698 . dm - the DM object 1699 1700 Output Parameter: 1701 . level - number of coarsenings 1702 1703 Level: developer 1704 1705 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1706 1707 @*/ 1708 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1709 { 1710 PetscFunctionBegin; 1711 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1712 *level = dm->leveldown; 1713 PetscFunctionReturn(0); 1714 } 1715 1716 1717 1718 #undef __FUNCT__ 1719 #define __FUNCT__ "DMRefineHierarchy" 1720 /*@C 1721 DMRefineHierarchy - Refines a DM object, all levels at once 1722 1723 Collective on DM 1724 1725 Input Parameter: 1726 + dm - the DM object 1727 - nlevels - the number of levels of refinement 1728 1729 Output Parameter: 1730 . 
dmf - the refined DM hierarchy 1731 1732 Level: developer 1733 1734 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1735 1736 @*/ 1737 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1738 { 1739 PetscErrorCode ierr; 1740 1741 PetscFunctionBegin; 1742 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1743 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1744 if (nlevels == 0) PetscFunctionReturn(0); 1745 if (dm->ops->refinehierarchy) { 1746 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1747 } else if (dm->ops->refine) { 1748 PetscInt i; 1749 1750 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1751 for (i=1; i<nlevels; i++) { 1752 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1753 } 1754 } else { 1755 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1756 } 1757 PetscFunctionReturn(0); 1758 } 1759 1760 #undef __FUNCT__ 1761 #define __FUNCT__ "DMCoarsenHierarchy" 1762 /*@C 1763 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1764 1765 Collective on DM 1766 1767 Input Parameter: 1768 + dm - the DM object 1769 - nlevels - the number of levels of coarsening 1770 1771 Output Parameter: 1772 . 
dmc - the coarsened DM hierarchy 1773 1774 Level: developer 1775 1776 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1777 1778 @*/ 1779 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 1780 { 1781 PetscErrorCode ierr; 1782 1783 PetscFunctionBegin; 1784 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1785 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1786 if (nlevels == 0) PetscFunctionReturn(0); 1787 PetscValidPointer(dmc,3); 1788 if (dm->ops->coarsenhierarchy) { 1789 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 1790 } else if (dm->ops->coarsen) { 1791 PetscInt i; 1792 1793 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 1794 for (i=1; i<nlevels; i++) { 1795 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 1796 } 1797 } else { 1798 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 1799 } 1800 PetscFunctionReturn(0); 1801 } 1802 1803 #undef __FUNCT__ 1804 #define __FUNCT__ "DMCreateAggregates" 1805 /*@ 1806 DMCreateAggregates - Gets the aggregates that map between 1807 grids associated with two DMs. 1808 1809 Collective on DM 1810 1811 Input Parameters: 1812 + dmc - the coarse grid DM 1813 - dmf - the fine grid DM 1814 1815 Output Parameters: 1816 . 
rest - the restriction matrix (transpose of the projection matrix) 1817 1818 Level: intermediate 1819 1820 .keywords: interpolation, restriction, multigrid 1821 1822 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 1823 @*/ 1824 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 1825 { 1826 PetscErrorCode ierr; 1827 1828 PetscFunctionBegin; 1829 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 1830 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 1831 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 1832 PetscFunctionReturn(0); 1833 } 1834 1835 #undef __FUNCT__ 1836 #define __FUNCT__ "DMSetApplicationContextDestroy" 1837 /*@C 1838 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 1839 1840 Not Collective 1841 1842 Input Parameters: 1843 + dm - the DM object 1844 - destroy - the destroy function 1845 1846 Level: intermediate 1847 1848 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1849 1850 @*/ 1851 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 1852 { 1853 PetscFunctionBegin; 1854 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1855 dm->ctxdestroy = destroy; 1856 PetscFunctionReturn(0); 1857 } 1858 1859 #undef __FUNCT__ 1860 #define __FUNCT__ "DMSetApplicationContext" 1861 /*@ 1862 DMSetApplicationContext - Set a user context into a DM object 1863 1864 Not Collective 1865 1866 Input Parameters: 1867 + dm - the DM object 1868 - ctx - the user context 1869 1870 Level: intermediate 1871 1872 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1873 1874 @*/ 1875 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 1876 { 1877 PetscFunctionBegin; 1878 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1879 dm->ctx = ctx; 1880 PetscFunctionReturn(0); 1881 } 1882 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; callers pass the ADDRESS of their context pointer (it is written through as a void**)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really a void** (address of the caller's pointer); store the saved context into it */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
    DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
-   f - the function to compute the initial guess

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
    DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
-   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

    Level: intermediate

    Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite differences if no Jacobian
           computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  /* a non-null f also becomes the function used for finite-difference Jacobians; a null f leaves functionj alone (see Notes above) */
  if (f) {
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVariableBounds"
/*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
1993 1994 Logically Collective on DM 1995 1996 Input Parameter: 1997 + dm - the DM object 1998 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 1999 2000 Level: intermediate 2001 2002 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2003 DMSetJacobian() 2004 2005 @*/ 2006 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2007 { 2008 PetscFunctionBegin; 2009 dm->ops->computevariablebounds = f; 2010 PetscFunctionReturn(0); 2011 } 2012 2013 #undef __FUNCT__ 2014 #define __FUNCT__ "DMHasVariableBounds" 2015 /*@ 2016 DMHasVariableBounds - does the DM object have a variable bounds function? 2017 2018 Not Collective 2019 2020 Input Parameter: 2021 . dm - the DM object to destroy 2022 2023 Output Parameter: 2024 . flg - PETSC_TRUE if the variable bounds function exists 2025 2026 Level: developer 2027 2028 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2029 2030 @*/ 2031 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2032 { 2033 PetscFunctionBegin; 2034 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2035 PetscFunctionReturn(0); 2036 } 2037 2038 #undef __FUNCT__ 2039 #define __FUNCT__ "DMComputeVariableBounds" 2040 /*@C 2041 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2042 2043 Logically Collective on DM 2044 2045 Input Parameters: 2046 + dm - the DM object to destroy 2047 - x - current solution at which the bounds are computed 2048 2049 Output parameters: 2050 + xl - lower bound 2051 - xu - upper bound 2052 2053 Level: intermediate 2054 2055 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2056 DMSetFunction(), DMSetVariableBounds() 2057 2058 @*/ 2059 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2060 { 2061 PetscErrorCode ierr; 2062 PetscFunctionBegin; 2063 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2064 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2065 if(dm->ops->computevariablebounds) { 2066 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2067 } 2068 else { 2069 ierr = VecSet(xl,SNES_VI_NINF); CHKERRQ(ierr); 2070 ierr = VecSet(xu,SNES_VI_INF); CHKERRQ(ierr); 2071 } 2072 PetscFunctionReturn(0); 2073 } 2074 2075 #undef __FUNCT__ 2076 #define __FUNCT__ "DMComputeInitialGuess" 2077 /*@ 2078 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2079 2080 Collective on DM 2081 2082 Input Parameter: 2083 + dm - the DM object to destroy 2084 - x - the vector to hold the initial guess values 2085 2086 Level: developer 2087 2088 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2089 2090 @*/ 2091 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2092 { 2093 PetscErrorCode ierr; 2094 PetscFunctionBegin; 2095 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2096 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2097 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2098 PetscFunctionReturn(0); 2099 } 2100 2101 #undef __FUNCT__ 2102 #define __FUNCT__ "DMHasInitialGuess" 2103 /*@ 
2104 DMHasInitialGuess - does the DM object have an initial guess function 2105 2106 Not Collective 2107 2108 Input Parameter: 2109 . dm - the DM object to destroy 2110 2111 Output Parameter: 2112 . flg - PETSC_TRUE if function exists 2113 2114 Level: developer 2115 2116 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2117 2118 @*/ 2119 PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg) 2120 { 2121 PetscFunctionBegin; 2122 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2123 *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE; 2124 PetscFunctionReturn(0); 2125 } 2126 2127 #undef __FUNCT__ 2128 #define __FUNCT__ "DMHasFunction" 2129 /*@ 2130 DMHasFunction - does the DM object have a function 2131 2132 Not Collective 2133 2134 Input Parameter: 2135 . dm - the DM object to destroy 2136 2137 Output Parameter: 2138 . flg - PETSC_TRUE if function exists 2139 2140 Level: developer 2141 2142 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2143 2144 @*/ 2145 PetscErrorCode DMHasFunction(DM dm,PetscBool *flg) 2146 { 2147 PetscFunctionBegin; 2148 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2149 *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE; 2150 PetscFunctionReturn(0); 2151 } 2152 2153 #undef __FUNCT__ 2154 #define __FUNCT__ "DMHasJacobian" 2155 /*@ 2156 DMHasJacobian - does the DM object have a matrix function 2157 2158 Not Collective 2159 2160 Input Parameter: 2161 . dm - the DM object to destroy 2162 2163 Output Parameter: 2164 . 
flg - PETSC_TRUE if function exists 2165 2166 Level: developer 2167 2168 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2169 2170 @*/ 2171 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2172 { 2173 PetscFunctionBegin; 2174 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2175 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2176 PetscFunctionReturn(0); 2177 } 2178 2179 #undef __FUNCT__ 2180 #define __FUNCT__ "DMHasColoring" 2181 /*@ 2182 DMHasColoring - does the DM object have a method of providing a coloring? 2183 2184 Not Collective 2185 2186 Input Parameter: 2187 . dm - the DM object 2188 2189 Output Parameter: 2190 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2191 2192 Level: developer 2193 2194 .seealso DMHasFunction(), DMCreateColoring() 2195 2196 @*/ 2197 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2198 { 2199 PetscFunctionBegin; 2200 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2201 PetscFunctionReturn(0); 2202 } 2203 2204 #undef __FUNCT__ 2205 #define __FUNCT__ "DMSetVec" 2206 /*@C 2207 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2208 2209 Collective on DM 2210 2211 Input Parameter: 2212 + dm - the DM object 2213 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction(), DMSetJacobian(), DMSetVariableBounds()

@*/
PetscErrorCode DMSetVec(DM dm,Vec x)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (x) {
    /* keep a private copy of x so later residual/Jacobian evaluations can default to it */
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  else if(dm->x) {
    /* passing x = PETSC_NULL clears any previously stored vector */
    ierr = VecDestroy(&dm->x); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction"
/*@
    DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - the location where the function is evaluated, may be ignored for linear problems
-   b - the vector to hold the right hand side entries

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()");
  /* push/pop so error tracebacks attribute failures inside the callback to user code */
  PetscStackPush("DM user function");
  ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian"
/*@
    DMComputeJacobian - compute the matrix entries for the solver

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* no user Jacobian routine: lazily build a finite-difference coloring Jacobian
       from the function set by DMSetFunction() (stored in ops->functionj) and
       install DMComputeJacobianDefault as the routine for this and later calls */
    ISColoring     coloring;
    MatFDColoring  fd;
    const MatType  mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  /* for nonlinear problems, fall back on the vector stored by DMSetVec()/a previous call */
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, const DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* no-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down any previous implementation before the type constructor rebuilds the DM */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, const DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* NOTE(review): type is a (const char**); PetscValidPointer() may be more appropriate than PetscValidCharPointer() -- confirm */
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
2417 2418 Collective on DM 2419 2420 Input Parameters: 2421 + dm - the DM 2422 - newtype - new DM type (use "same" for the same type) 2423 2424 Output Parameter: 2425 . M - pointer to new DM 2426 2427 Notes: 2428 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2429 the MPI communicator of the generated DM is always the same as the communicator 2430 of the input DM. 2431 2432 Level: intermediate 2433 2434 .seealso: DMCreate() 2435 @*/ 2436 PetscErrorCode DMConvert(DM dm, const DMType newtype, DM *M) 2437 { 2438 DM B; 2439 char convname[256]; 2440 PetscBool sametype, issame; 2441 PetscErrorCode ierr; 2442 2443 PetscFunctionBegin; 2444 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2445 PetscValidType(dm,1); 2446 PetscValidPointer(M,3); 2447 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2448 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2449 { 2450 PetscErrorCode (*conv)(DM, const DMType, DM *) = PETSC_NULL; 2451 2452 /* 2453 Order of precedence: 2454 1) See if a specialized converter is known to the current DM. 2455 2) See if a specialized converter is known to the desired DM class. 2456 3) See if a good general converter is registered for the desired class 2457 4) See if a good general converter is known for the current matrix. 2458 5) Use a really basic converter. 2459 */ 2460 2461 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2462 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2463 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2464 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2465 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2466 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2467 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2468 if (conv) goto foundconv; 2469 2470 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2471 ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr); 2472 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2473 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2474 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2475 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2476 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2477 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2478 ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2479 if (conv) { 2480 ierr = DMDestroy(&B);CHKERRQ(ierr); 2481 goto foundconv; 2482 } 2483 2484 #if 0 2485 /* 3) See if a good general converter is registered for the desired class */ 2486 conv = B->ops->convertfrom; 2487 ierr = DMDestroy(&B);CHKERRQ(ierr); 2488 if (conv) goto foundconv; 2489 2490 /* 4) See if a good general converter is known for the current matrix */ 2491 if (dm->ops->convert) { 2492 conv = dm->ops->convert; 2493 } 2494 if (conv) goto foundconv; 2495 #endif 2496 2497 /* 5) Use a really basic converter. 
*/ 2498 SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2499 2500 foundconv: 2501 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2502 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2503 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2504 } 2505 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2506 PetscFunctionReturn(0); 2507 } 2508 2509 /*--------------------------------------------------------------------------------------------------------------------*/ 2510 2511 #undef __FUNCT__ 2512 #define __FUNCT__ "DMRegister" 2513 /*@C 2514 DMRegister - See DMRegisterDynamic() 2515 2516 Level: advanced 2517 @*/ 2518 PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM)) 2519 { 2520 char fullname[PETSC_MAX_PATH_LEN]; 2521 PetscErrorCode ierr; 2522 2523 PetscFunctionBegin; 2524 ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr); 2525 ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr); 2526 ierr = PetscStrcat(fullname, name);CHKERRQ(ierr); 2527 ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr); 2528 PetscFunctionReturn(0); 2529 } 2530 2531 2532 /*--------------------------------------------------------------------------------------------------------------------*/ 2533 #undef __FUNCT__ 2534 #define __FUNCT__ "DMRegisterDestroy" 2535 /*@C 2536 DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic(). 

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* allow the list to be rebuilt by a later DMRegisterAll() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* Names of the user's MATLAB residual/Jacobian routines plus an optional MATLAB context */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
                         DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 1,nrhs = 4;
  mxArray         *plhs[1],*prhs[4];
  long long int   lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  /* object pointers are smuggled into MATLAB as doubles carrying the raw pointer bits;
     PetscMemcpy into a long long avoids aliasing/width issues with a direct cast */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* plhs[0] holds the error code returned by the MATLAB routine */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine
   (given by its MATLAB function name) to be used for this DM

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    /* first MATLAB callback installed on this DM: allocate the shared context */
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
                         DMSetJacobianMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 2,nrhs = 5;
  mxArray         *plhs[2],*prhs[5];
  long long int   lx = 0,lA = 0,lB = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B */
  /* object pointers are smuggled into MATLAB as doubles carrying the raw pointer bits */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)lA);
  prhs[3] = mxCreateDoubleScalar((double)lB);
  prhs[4] = mxCreateString(sctx->jacname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* plhs[0] is the MatStructure flag, plhs[1] the error code returned by the MATLAB routine */
  *str = (MatStructure) mxGetScalar(plhs[0]);
  ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetJacobianMatlab"
/*
   DMSetJacobianMatlab - Sets the Jacobian function evaluation routine
   (given by its MATLAB function name) to be used for this DM

*/
PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    /* first MATLAB callback installed on this DM: allocate the shared context */
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary or HDF5 format
  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   Defaults to the DM DA.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

   In addition, PETSc automatically does the byte swapping for
machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
linux, Windows and the paragon; thus if you write your own binary
read/write routines you have to swap the bytes; see PetscBinaryRead()
and PetscBinaryWrite() to see how this may be done.
2732 2733 Concepts: vector^loading from file 2734 2735 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2736 @*/ 2737 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2738 { 2739 PetscErrorCode ierr; 2740 2741 PetscFunctionBegin; 2742 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2743 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2744 2745 if (!((PetscObject)newdm)->type_name) { 2746 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2747 } 2748 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2749 PetscFunctionReturn(0); 2750 } 2751 2752 /******************************** FEM Support **********************************/ 2753 2754 #undef __FUNCT__ 2755 #define __FUNCT__ "DMPrintCellVector" 2756 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2757 PetscInt f; 2758 PetscErrorCode ierr; 2759 2760 PetscFunctionBegin; 2761 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2762 for(f = 0; f < len; ++f) { 2763 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2764 } 2765 PetscFunctionReturn(0); 2766 } 2767 2768 #undef __FUNCT__ 2769 #define __FUNCT__ "DMPrintCellMatrix" 2770 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2771 PetscInt f, g; 2772 PetscErrorCode ierr; 2773 2774 PetscFunctionBegin; 2775 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2776 for(f = 0; f < rows; ++f) { 2777 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2778 for(g = 0; g < cols; ++g) { 2779 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2780 } 2781 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2782 } 2783 PetscFunctionReturn(0); 2784 } 2785 2786 #undef __FUNCT__ 2787 #define __FUNCT__ "DMGetLocalFunction" 2788 /*@C 2789 DMGetLocalFunction - Get the local residual function 
from this DM 2790 2791 Not collective 2792 2793 Input Parameter: 2794 . dm - The DM 2795 2796 Output Parameter: 2797 . lf - The local residual function 2798 2799 Calling sequence of lf: 2800 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2801 2802 + snes - the SNES context 2803 . x - local vector with the state at which to evaluate residual 2804 . f - local vector to put residual in 2805 - ctx - optional user-defined function context 2806 2807 Level: intermediate 2808 2809 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2810 @*/ 2811 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 2812 { 2813 PetscFunctionBegin; 2814 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2815 if (lf) *lf = dm->lf; 2816 PetscFunctionReturn(0); 2817 } 2818 2819 #undef __FUNCT__ 2820 #define __FUNCT__ "DMSetLocalFunction" 2821 /*@C 2822 DMSetLocalFunction - Set the local residual function from this DM 2823 2824 Not collective 2825 2826 Input Parameters: 2827 + dm - The DM 2828 - lf - The local residual function 2829 2830 Calling sequence of lf: 2831 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2832 2833 + snes - the SNES context 2834 . x - local vector with the state at which to evaluate residual 2835 . f - local vector to put residual in 2836 - ctx - optional user-defined function context 2837 2838 Level: intermediate 2839 2840 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2841 @*/ 2842 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 2843 { 2844 PetscFunctionBegin; 2845 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2846 dm->lf = lf; 2847 PetscFunctionReturn(0); 2848 } 2849 2850 #undef __FUNCT__ 2851 #define __FUNCT__ "DMGetLocalJacobian" 2852 /*@C 2853 DMGetLocalJacobian - Get the local Jacobian function from this DM 2854 2855 Not collective 2856 2857 Input Parameter: 2858 . dm - The DM 2859 2860 Output Parameter: 2861 . 
lj - The local Jacobian function 2862 2863 Calling sequence of lj: 2864 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 2865 2866 + snes - the SNES context 2867 . x - local vector with the state at which to evaluate residual 2868 . J - matrix to put Jacobian in 2869 . M - matrix to use for defining Jacobian preconditioner 2870 - ctx - optional user-defined function context 2871 2872 Level: intermediate 2873 2874 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 2875 @*/ 2876 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 2877 { 2878 PetscFunctionBegin; 2879 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2880 if (lj) *lj = dm->lj; 2881 PetscFunctionReturn(0); 2882 } 2883 2884 #undef __FUNCT__ 2885 #define __FUNCT__ "DMSetLocalJacobian" 2886 /*@C 2887 DMSetLocalJacobian - Set the local Jacobian function from this DM 2888 2889 Not collective 2890 2891 Input Parameters: 2892 + dm - The DM 2893 - lj - The local Jacobian function 2894 2895 Calling sequence of lj: 2896 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 2897 2898 + snes - the SNES context 2899 . x - local vector with the state at which to evaluate residual 2900 . J - matrix to put Jacobian in 2901 . M - matrix to use for defining Jacobian preconditioner 2902 - ctx - optional user-defined function context 2903 2904 Level: intermediate 2905 2906 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 2907 @*/ 2908 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 2909 { 2910 PetscFunctionBegin; 2911 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2912 dm->lj = lj; 2913 PetscFunctionReturn(0); 2914 } 2915 2916 #undef __FUNCT__ 2917 #define __FUNCT__ "DMGetDefaultSection" 2918 /*@ 2919 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2920 2921 Input Parameter: 2922 . dm - The DM 2923 2924 Output Parameter: 2925 . 
section - The PetscSection 2926 2927 Level: intermediate 2928 2929 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2930 2931 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2932 @*/ 2933 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 2934 PetscFunctionBegin; 2935 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2936 PetscValidPointer(section, 2); 2937 *section = dm->defaultSection; 2938 PetscFunctionReturn(0); 2939 } 2940 2941 #undef __FUNCT__ 2942 #define __FUNCT__ "DMSetDefaultSection" 2943 /*@ 2944 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 2945 2946 Input Parameters: 2947 + dm - The DM 2948 - section - The PetscSection 2949 2950 Level: intermediate 2951 2952 Note: Any existing Section will be destroyed 2953 2954 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2955 @*/ 2956 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 2957 PetscErrorCode ierr; 2958 2959 PetscFunctionBegin; 2960 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2961 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 2962 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 2963 dm->defaultSection = section; 2964 PetscFunctionReturn(0); 2965 } 2966 2967 #undef __FUNCT__ 2968 #define __FUNCT__ "DMGetDefaultGlobalSection" 2969 /*@ 2970 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 2971 2972 Input Parameter: 2973 . dm - The DM 2974 2975 Output Parameter: 2976 . section - The PetscSection 2977 2978 Level: intermediate 2979 2980 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
2981 2982 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 2983 @*/ 2984 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 2985 PetscErrorCode ierr; 2986 2987 PetscFunctionBegin; 2988 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2989 PetscValidPointer(section, 2); 2990 if (!dm->defaultGlobalSection) { 2991 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, &dm->defaultGlobalSection);CHKERRQ(ierr); 2992 } 2993 *section = dm->defaultGlobalSection; 2994 PetscFunctionReturn(0); 2995 } 2996 2997 #undef __FUNCT__ 2998 #define __FUNCT__ "DMGetDefaultSF" 2999 /*@ 3000 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3001 it is created from the default PetscSection layouts in the DM. 3002 3003 Input Parameter: 3004 . dm - The DM 3005 3006 Output Parameter: 3007 . sf - The PetscSF 3008 3009 Level: intermediate 3010 3011 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
3012 3013 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3014 @*/ 3015 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3016 PetscInt nroots; 3017 PetscErrorCode ierr; 3018 3019 PetscFunctionBegin; 3020 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3021 PetscValidPointer(sf, 2); 3022 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3023 if (nroots < 0) { 3024 PetscSection section, gSection; 3025 3026 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3027 if (section) { 3028 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3029 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3030 } else { 3031 *sf = PETSC_NULL; 3032 PetscFunctionReturn(0); 3033 } 3034 } 3035 *sf = dm->defaultSF; 3036 PetscFunctionReturn(0); 3037 } 3038 3039 #undef __FUNCT__ 3040 #define __FUNCT__ "DMSetDefaultSF" 3041 /*@ 3042 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3043 3044 Input Parameters: 3045 + dm - The DM 3046 - sf - The PetscSF 3047 3048 Level: intermediate 3049 3050 Note: Any previous SF is destroyed 3051 3052 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3053 @*/ 3054 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3055 PetscErrorCode ierr; 3056 3057 PetscFunctionBegin; 3058 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3059 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3060 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3061 dm->defaultSF = sf; 3062 PetscFunctionReturn(0); 3063 } 3064 3065 #undef __FUNCT__ 3066 #define __FUNCT__ "DMCreateDefaultSF" 3067 /*@C 3068 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3069 describing the data layout. 3070 3071 Input Parameters: 3072 + dm - The DM 3073 . 
localSection - PetscSection describing the local data layout 3074 - globalSection - PetscSection describing the global data layout 3075 3076 Level: intermediate 3077 3078 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3079 @*/ 3080 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3081 { 3082 MPI_Comm comm = ((PetscObject) dm)->comm; 3083 PetscLayout layout; 3084 const PetscInt *ranges; 3085 PetscInt *local; 3086 PetscSFNode *remote; 3087 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3088 PetscMPIInt size, rank; 3089 PetscErrorCode ierr; 3090 3091 PetscFunctionBegin; 3092 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3093 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3094 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3095 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3096 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3097 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3098 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3099 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3100 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3101 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3102 for(p = pStart, nleaves = 0; p < pEnd; ++p) { 3103 PetscInt dof, cdof; 3104 3105 ierr = PetscSectionGetDof(globalSection, p, &dof);CHKERRQ(ierr); 3106 ierr = PetscSectionGetConstraintDof(globalSection, p, &cdof);CHKERRQ(ierr); 3107 nleaves += dof < 0 ? 
-(dof+1)-cdof : dof-cdof; 3108 } 3109 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3110 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3111 for(p = pStart, l = 0; p < pEnd; ++p) { 3112 PetscInt *cind; 3113 PetscInt dof, gdof, cdof, dim, off, goff, d, c; 3114 3115 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3116 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3117 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3118 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3119 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3120 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3121 dim = dof-cdof; 3122 for(d = 0, c = 0; d < dof; ++d) { 3123 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3124 local[l+d-c] = off+d; 3125 } 3126 if (gdof < 0) { 3127 for(d = 0; d < dim; ++d, ++l) { 3128 PetscInt offset = -(goff+1) + d, r; 3129 3130 for(r = 0; r < size; ++r) { 3131 if ((offset >= ranges[r]) && (offset < ranges[r+1])) break; 3132 } 3133 remote[l].rank = r; 3134 remote[l].index = offset - ranges[r]; 3135 } 3136 } else { 3137 for(d = 0; d < dim; ++d, ++l) { 3138 remote[l].rank = rank; 3139 remote[l].index = goff+d - ranges[rank]; 3140 } 3141 } 3142 } 3143 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3144 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3145 PetscFunctionReturn(0); 3146 } 3147