1 #include <petscsnes.h> 2 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = PETSC_NULL; 35 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 36 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 39 #endif 40 41 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 42 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 43 44 45 v->ltogmap = PETSC_NULL; 46 v->ltogmapb = PETSC_NULL; 47 v->bs = 1; 48 v->coloringtype = IS_COLORING_GLOBAL; 49 v->lf = PETSC_NULL; 50 v->lj = PETSC_NULL; 51 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 52 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 53 v->defaultSection = PETSC_NULL; 54 v->defaultGlobalSection = PETSC_NULL; 55 { 56 PetscInt i; 57 for(i = 0; i < 10; ++i) { 58 v->nullspaceConstructors[i] = PETSC_NULL; 59 } 60 } 61 62 *dm = v; 63 PetscFunctionReturn(0); 64 } 65 66 67 #undef __FUNCT__ 68 #define __FUNCT__ "DMSetVecType" 69 /*@C 70 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 71 72 Logically Collective on DMDA 73 74 Input Parameter: 75 + da - 
initial distributed array 76 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 77 78 Options Database: 79 . -dm_vec_type ctype 80 81 Level: intermediate 82 83 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 84 @*/ 85 PetscErrorCode DMSetVecType(DM da,const VecType ctype) 86 { 87 PetscErrorCode ierr; 88 89 PetscFunctionBegin; 90 PetscValidHeaderSpecific(da,DM_CLASSID,1); 91 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 92 ierr = PetscStrallocpy(ctype,&da->vectype);CHKERRQ(ierr); 93 PetscFunctionReturn(0); 94 } 95 96 #undef __FUNCT__ 97 #define __FUNCT__ "DMSetMatType" 98 /*@C 99 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 100 101 Logically Collective on DM 102 103 Input Parameter: 104 + dm - the DM context 105 . ctype - the matrix type 106 107 Options Database: 108 . -dm_mat_type ctype 109 110 Level: intermediate 111 112 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 113 @*/ 114 PetscErrorCode DMSetMatType(DM dm,const MatType ctype) 115 { 116 PetscErrorCode ierr; 117 PetscFunctionBegin; 118 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 119 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 120 ierr = PetscStrallocpy(ctype,&dm->mattype);CHKERRQ(ierr); 121 PetscFunctionReturn(0); 122 } 123 124 #undef __FUNCT__ 125 #define __FUNCT__ "DMSetOptionsPrefix" 126 /*@C 127 DMSetOptionsPrefix - Sets the prefix used for searching for all 128 DMDA options in the database. 129 130 Logically Collective on DMDA 131 132 Input Parameter: 133 + da - the DMDA context 134 - prefix - the prefix to prepend to all option names 135 136 Notes: 137 A hyphen (-) must NOT be given at the beginning of the prefix name. 138 The first character of all runtime options is AUTOMATICALLY the hyphen. 
139 140 Level: advanced 141 142 .keywords: DMDA, set, options, prefix, database 143 144 .seealso: DMSetFromOptions() 145 @*/ 146 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 147 { 148 PetscErrorCode ierr; 149 150 PetscFunctionBegin; 151 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 152 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 153 PetscFunctionReturn(0); 154 } 155 156 #undef __FUNCT__ 157 #define __FUNCT__ "DMDestroy" 158 /*@ 159 DMDestroy - Destroys a vector packer or DMDA. 160 161 Collective on DM 162 163 Input Parameter: 164 . dm - the DM object to destroy 165 166 Level: developer 167 168 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 169 170 @*/ 171 PetscErrorCode DMDestroy(DM *dm) 172 { 173 PetscInt i, cnt = 0; 174 DMNamedVecLink nlink,nnext; 175 PetscErrorCode ierr; 176 177 PetscFunctionBegin; 178 if (!*dm) PetscFunctionReturn(0); 179 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 180 181 /* count all the circular references of DM and its contained Vecs */ 182 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 183 if ((*dm)->localin[i]) {cnt++;} 184 if ((*dm)->globalin[i]) {cnt++;} 185 } 186 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 187 if ((*dm)->x) { 188 PetscObject obj; 189 ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr); 190 if (obj == (PetscObject)*dm) cnt++; 191 } 192 193 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 194 /* 195 Need this test because the dm references the vectors that 196 reference the dm, so destroying the dm calls destroy on the 197 vectors that cause another destroy on the dm 198 */ 199 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 200 ((PetscObject) (*dm))->refct = 0; 201 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 202 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with 
DMGetLocalVector()"); 203 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 204 } 205 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 206 nnext = nlink->next; 207 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 208 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 209 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 210 ierr = PetscFree(nlink);CHKERRQ(ierr); 211 } 212 (*dm)->namedglobal = PETSC_NULL; 213 214 /* Destroy the list of hooks */ 215 { 216 DMCoarsenHookLink link,next; 217 for (link=(*dm)->coarsenhook; link; link=next) { 218 next = link->next; 219 ierr = PetscFree(link);CHKERRQ(ierr); 220 } 221 (*dm)->coarsenhook = PETSC_NULL; 222 } 223 { 224 DMRefineHookLink link,next; 225 for (link=(*dm)->refinehook; link; link=next) { 226 next = link->next; 227 ierr = PetscFree(link);CHKERRQ(ierr); 228 } 229 (*dm)->refinehook = PETSC_NULL; 230 } 231 /* Destroy the work arrays */ 232 { 233 DMWorkLink link,next; 234 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 235 for (link=(*dm)->workin; link; link=next) { 236 next = link->next; 237 ierr = PetscFree(link->mem);CHKERRQ(ierr); 238 ierr = PetscFree(link);CHKERRQ(ierr); 239 } 240 (*dm)->workin = PETSC_NULL; 241 } 242 243 if ((*dm)->ctx && (*dm)->ctxdestroy) { 244 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 245 } 246 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 247 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 248 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 249 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 250 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 251 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 252 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 253 254 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 255 ierr = 
PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 256 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 257 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 258 /* if memory was published with AMS then destroy it */ 259 ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 260 261 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 262 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 263 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 264 PetscFunctionReturn(0); 265 } 266 267 #undef __FUNCT__ 268 #define __FUNCT__ "DMSetUp" 269 /*@ 270 DMSetUp - sets up the data structures inside a DM object 271 272 Collective on DM 273 274 Input Parameter: 275 . dm - the DM object to setup 276 277 Level: developer 278 279 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 280 281 @*/ 282 PetscErrorCode DMSetUp(DM dm) 283 { 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 288 if (dm->setupcalled) PetscFunctionReturn(0); 289 if (dm->ops->setup) { 290 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 291 } 292 dm->setupcalled = PETSC_TRUE; 293 PetscFunctionReturn(0); 294 } 295 296 #undef __FUNCT__ 297 #define __FUNCT__ "DMSetFromOptions" 298 /*@ 299 DMSetFromOptions - sets parameters in a DM from the options database 300 301 Collective on DM 302 303 Input Parameter: 304 . dm - the DM object to set options for 305 306 Options Database: 307 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 308 . -dm_vec_type <type> type of vector to create inside DM 309 . 
-dm_mat_type <type> type of matrix to create inside DM 310 - -dm_coloring_type <global or ghosted> 311 312 Level: developer 313 314 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 315 316 @*/ 317 PetscErrorCode DMSetFromOptions(DM dm) 318 { 319 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 320 PetscErrorCode ierr; 321 char typeName[256] = MATAIJ; 322 323 PetscFunctionBegin; 324 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 325 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 326 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 327 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 328 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 329 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 330 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 331 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 332 if (flg) { 333 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 334 } 335 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof typeName,&flg);CHKERRQ(ierr); 336 if (flg) { 337 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 338 } 339 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 340 if 
(dm->ops->setfromoptions) { 341 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 342 } 343 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 344 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 345 ierr = PetscOptionsEnd();CHKERRQ(ierr); 346 if (flg1) { 347 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 348 } 349 if (flg2) { 350 PetscViewer viewer; 351 352 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 353 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 354 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 355 ierr = DMView(dm, viewer);CHKERRQ(ierr); 356 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 357 } 358 if (flg3) { 359 PetscViewer viewer; 360 361 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 362 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 363 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 364 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 365 ierr = DMView(dm, viewer);CHKERRQ(ierr); 366 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 367 } 368 if (flg4) { 369 PetscViewer viewer; 370 371 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 372 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 373 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 374 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 375 ierr = DMView(dm, viewer);CHKERRQ(ierr); 376 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 377 } 378 PetscFunctionReturn(0); 379 } 380 381 #undef __FUNCT__ 382 #define __FUNCT__ "DMView" 383 /*@C 384 DMView - Views a vector packer or DMDA. 
385 386 Collective on DM 387 388 Input Parameter: 389 + dm - the DM object to view 390 - v - the viewer 391 392 Level: developer 393 394 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 395 396 @*/ 397 PetscErrorCode DMView(DM dm,PetscViewer v) 398 { 399 PetscErrorCode ierr; 400 401 PetscFunctionBegin; 402 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 403 if (!v) { 404 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 405 } 406 if (dm->ops->view) { 407 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 408 } 409 PetscFunctionReturn(0); 410 } 411 412 #undef __FUNCT__ 413 #define __FUNCT__ "DMCreateGlobalVector" 414 /*@ 415 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 416 417 Collective on DM 418 419 Input Parameter: 420 . dm - the DM object 421 422 Output Parameter: 423 . vec - the global vector 424 425 Level: beginner 426 427 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 428 429 @*/ 430 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 431 { 432 PetscErrorCode ierr; 433 434 PetscFunctionBegin; 435 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 436 if (dm->defaultSection) { 437 PetscSection gSection; 438 PetscInt localSize; 439 440 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 441 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 442 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 443 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 444 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 445 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 446 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 447 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 448 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 449 /* ierr = 
VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 450 } else { 451 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 452 } 453 PetscFunctionReturn(0); 454 } 455 456 #undef __FUNCT__ 457 #define __FUNCT__ "DMCreateLocalVector" 458 /*@ 459 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 460 461 Not Collective 462 463 Input Parameter: 464 . dm - the DM object 465 466 Output Parameter: 467 . vec - the local vector 468 469 Level: beginner 470 471 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 472 473 @*/ 474 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 475 { 476 PetscErrorCode ierr; 477 478 PetscFunctionBegin; 479 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 480 if (dm->defaultSection) { 481 PetscInt localSize; 482 483 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 484 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 485 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 486 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 487 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 488 } else { 489 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 490 } 491 PetscFunctionReturn(0); 492 } 493 494 #undef __FUNCT__ 495 #define __FUNCT__ "DMGetLocalToGlobalMapping" 496 /*@ 497 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 498 499 Collective on DM 500 501 Input Parameter: 502 . dm - the DM that provides the mapping 503 504 Output Parameter: 505 . ltog - the mapping 506 507 Level: intermediate 508 509 Notes: 510 This mapping can then be used by VecSetLocalToGlobalMapping() or 511 MatSetLocalToGlobalMapping(). 
512 513 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 514 @*/ 515 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 516 { 517 PetscErrorCode ierr; 518 519 PetscFunctionBegin; 520 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 521 PetscValidPointer(ltog,2); 522 if (!dm->ltogmap) { 523 PetscSection section, sectionGlobal; 524 525 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 526 if (section) { 527 PetscInt *ltog; 528 PetscInt pStart, pEnd, size, p, l; 529 530 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 531 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 532 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 533 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 534 for(p = pStart, l = 0; p < pEnd; ++p) { 535 PetscInt dof, off, c; 536 537 /* Should probably use constrained dofs */ 538 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 539 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 540 for(c = 0; c < dof; ++c, ++l) { 541 ltog[l] = off+c; 542 } 543 } 544 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 545 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 546 } else { 547 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 548 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 549 } 550 } 551 *ltog = dm->ltogmap; 552 PetscFunctionReturn(0); 553 } 554 555 #undef __FUNCT__ 556 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 557 /*@ 558 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 559 560 Collective on DM 561 562 Input Parameter: 563 . da - the distributed array that provides the mapping 564 565 Output Parameter: 566 . 
ltog - the block mapping 567 568 Level: intermediate 569 570 Notes: 571 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 572 MatSetLocalToGlobalMappingBlock(). 573 574 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 575 @*/ 576 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 577 { 578 PetscErrorCode ierr; 579 580 PetscFunctionBegin; 581 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 582 PetscValidPointer(ltog,2); 583 if (!dm->ltogmapb) { 584 PetscInt bs; 585 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 586 if (bs > 1) { 587 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 588 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 589 } else { 590 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 591 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 592 } 593 } 594 *ltog = dm->ltogmapb; 595 PetscFunctionReturn(0); 596 } 597 598 #undef __FUNCT__ 599 #define __FUNCT__ "DMGetBlockSize" 600 /*@ 601 DMGetBlockSize - Gets the inherent block size associated with a DM 602 603 Not Collective 604 605 Input Parameter: 606 . dm - the DM with block structure 607 608 Output Parameter: 609 . 
bs - the block size, 1 implies no exploitable block structure 610 611 Level: intermediate 612 613 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 614 @*/ 615 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 616 { 617 PetscFunctionBegin; 618 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 619 PetscValidPointer(bs,2); 620 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 621 *bs = dm->bs; 622 PetscFunctionReturn(0); 623 } 624 625 #undef __FUNCT__ 626 #define __FUNCT__ "DMCreateInterpolation" 627 /*@ 628 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 629 630 Collective on DM 631 632 Input Parameter: 633 + dm1 - the DM object 634 - dm2 - the second, finer DM object 635 636 Output Parameter: 637 + mat - the interpolation 638 - vec - the scaling (optional) 639 640 Level: developer 641 642 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 643 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 644 645 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMDAGetCoordinateDA()) to interpolate the mesh coordinate vectors 646 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
647 648 649 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 650 651 @*/ 652 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 653 { 654 PetscErrorCode ierr; 655 656 PetscFunctionBegin; 657 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 658 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 659 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 660 PetscFunctionReturn(0); 661 } 662 663 #undef __FUNCT__ 664 #define __FUNCT__ "DMCreateInjection" 665 /*@ 666 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 667 668 Collective on DM 669 670 Input Parameter: 671 + dm1 - the DM object 672 - dm2 - the second, finer DM object 673 674 Output Parameter: 675 . ctx - the injection 676 677 Level: developer 678 679 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 680 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 681 682 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 683 684 @*/ 685 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 686 { 687 PetscErrorCode ierr; 688 689 PetscFunctionBegin; 690 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 691 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 692 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 693 PetscFunctionReturn(0); 694 } 695 696 #undef __FUNCT__ 697 #define __FUNCT__ "DMCreateColoring" 698 /*@C 699 DMCreateColoring - Gets coloring for a DMDA or DMComposite 700 701 Collective on DM 702 703 Input Parameter: 704 + dm - the DM object 705 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 706 - matype - either MATAIJ or MATBAIJ 707 708 Output Parameter: 709 . 
coloring - the coloring 710 711 Level: developer 712 713 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 714 715 @*/ 716 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 717 { 718 PetscErrorCode ierr; 719 720 PetscFunctionBegin; 721 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 722 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 723 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 724 PetscFunctionReturn(0); 725 } 726 727 #undef __FUNCT__ 728 #define __FUNCT__ "DMCreateMatrix" 729 /*@C 730 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 731 732 Collective on DM 733 734 Input Parameter: 735 + dm - the DM object 736 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 737 any type which inherits from one of these (such as MATAIJ) 738 739 Output Parameter: 740 . mat - the empty Jacobian 741 742 Level: beginner 743 744 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 745 do not need to do it yourself. 746 747 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 748 the nonzero pattern call DMDASetMatPreallocateOnly() 749 750 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 751 internally by PETSc. 752 753 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 754 the indices for the global numbering for DMDAs which is complicated. 
755 756 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 757 758 @*/ 759 PetscErrorCode DMCreateMatrix(DM dm,const MatType mtype,Mat *mat) 760 { 761 PetscErrorCode ierr; 762 763 PetscFunctionBegin; 764 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 765 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 766 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 767 #endif 768 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 769 PetscValidPointer(mat,3); 770 if (dm->mattype) { 771 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 772 } else { 773 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 774 } 775 PetscFunctionReturn(0); 776 } 777 778 #undef __FUNCT__ 779 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 780 /*@ 781 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 782 preallocated but the nonzero structure and zero values will not be set. 783 784 Logically Collective on DMDA 785 786 Input Parameter: 787 + dm - the DM 788 - only - PETSC_TRUE if only want preallocation 789 790 Level: developer 791 .seealso DMCreateMatrix() 792 @*/ 793 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 794 { 795 PetscFunctionBegin; 796 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 797 dm->prealloc_only = only; 798 PetscFunctionReturn(0); 799 } 800 801 #undef __FUNCT__ 802 #define __FUNCT__ "DMGetWorkArray" 803 /*@C 804 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 805 806 Not Collective 807 808 Input Parameters: 809 + dm - the DM object 810 . count - The minium size 811 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 812 813 Output Parameter: 814 . 
array - the work array 815 816 Level: developer 817 818 .seealso DMDestroy(), DMCreate() 819 @*/ 820 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 821 { 822 PetscErrorCode ierr; 823 DMWorkLink link; 824 size_t size; 825 826 PetscFunctionBegin; 827 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 828 PetscValidPointer(mem,4); 829 if (dm->workin) { 830 link = dm->workin; 831 dm->workin = dm->workin->next; 832 } else { 833 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 834 } 835 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 836 if (size*count > link->bytes) { 837 ierr = PetscFree(link->mem);CHKERRQ(ierr); 838 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 839 link->bytes = size*count; 840 } 841 link->next = dm->workout; 842 dm->workout = link; 843 *(void**)mem = link->mem; 844 PetscFunctionReturn(0); 845 } 846 847 #undef __FUNCT__ 848 #define __FUNCT__ "DMRestoreWorkArray" 849 /*@C 850 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 851 852 Not Collective 853 854 Input Parameters: 855 + dm - the DM object 856 . count - The minium size 857 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 858 859 Output Parameter: 860 . 
array - the work array 861 862 Level: developer 863 864 .seealso DMDestroy(), DMCreate() 865 @*/ 866 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 867 { 868 DMWorkLink *p,link; 869 870 PetscFunctionBegin; 871 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 872 PetscValidPointer(mem,4); 873 for (p=&dm->workout; (link=*p); p=&link->next) { 874 if (link->mem == *(void**)mem) { 875 *p = link->next; 876 link->next = dm->workin; 877 dm->workin = link; 878 *(void**)mem = PETSC_NULL; 879 PetscFunctionReturn(0); 880 } 881 } 882 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 883 PetscFunctionReturn(0); 884 } 885 886 #undef __FUNCT__ 887 #define __FUNCT__ "DMSetNullSpaceConstructor" 888 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 889 { 890 PetscFunctionBegin; 891 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 892 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 893 dm->nullspaceConstructors[field] = nullsp; 894 PetscFunctionReturn(0); 895 } 896 897 #undef __FUNCT__ 898 #define __FUNCT__ "DMCreateFieldIS" 899 /*@C 900 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 901 902 Not collective 903 904 Input Parameter: 905 . dm - the DM object 906 907 Output Parameters: 908 + numFields - The number of fields (or PETSC_NULL if not requested) 909 . fieldNames - The name for each field (or PETSC_NULL if not requested) 910 - fields - The global indices for each field (or PETSC_NULL if not requested) 911 912 Level: intermediate 913 914 Notes: 915 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 916 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 917 PetscFree(). 
918 919 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 920 @*/ 921 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 922 { 923 PetscSection section, sectionGlobal; 924 PetscErrorCode ierr; 925 926 PetscFunctionBegin; 927 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 928 if (numFields) { 929 PetscValidPointer(numFields,2); 930 *numFields = 0; 931 } 932 if (fieldNames) { 933 PetscValidPointer(fieldNames,3); 934 *fieldNames = PETSC_NULL; 935 } 936 if (fields) { 937 PetscValidPointer(fields,4); 938 *fields = PETSC_NULL; 939 } 940 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 941 if (section) { 942 PetscInt *fieldSizes, **fieldIndices; 943 PetscInt nF, f, pStart, pEnd, p; 944 945 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 946 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 947 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 948 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 949 for(f = 0; f < nF; ++f) { 950 fieldSizes[f] = 0; 951 } 952 for(p = pStart; p < pEnd; ++p) { 953 PetscInt gdof; 954 955 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 956 if (gdof > 0) { 957 for(f = 0; f < nF; ++f) { 958 PetscInt fdof, fcdof; 959 960 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 961 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 962 fieldSizes[f] += fdof-fcdof; 963 } 964 } 965 } 966 for(f = 0; f < nF; ++f) { 967 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 968 fieldSizes[f] = 0; 969 } 970 for(p = pStart; p < pEnd; ++p) { 971 PetscInt gdof, goff; 972 973 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 974 if (gdof > 0) { 975 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 976 for(f = 0; f < nF; ++f) { 977 PetscInt fdof, fcdof, fc; 978 979 
ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 980 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 981 for(fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 982 fieldIndices[f][fieldSizes[f]] = goff++; 983 } 984 } 985 } 986 } 987 if (numFields) {*numFields = nF;} 988 if (fieldNames) { 989 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 990 for(f = 0; f < nF; ++f) { 991 const char *fieldName; 992 993 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 994 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 995 } 996 } 997 if (fields) { 998 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 999 for(f = 0; f < nF; ++f) { 1000 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1001 } 1002 } 1003 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1004 } else { 1005 if(dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1006 } 1007 PetscFunctionReturn(0); 1008 } 1009 1010 #undef __FUNCT__ 1011 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1012 /*@C 1013 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1014 1015 Not Collective 1016 1017 Input Parameters: 1018 + dm - the DM object 1019 - name - the name of the field decomposition 1020 1021 Output Parameter: 1022 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1023 1024 Level: advanced 1025 1026 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1027 @*/ 1028 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1029 { 1030 PetscErrorCode ierr; 1031 1032 PetscFunctionBegin; 1033 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1034 PetscValidCharPointer(name,2); 1035 PetscValidPointer(ddm,3); 1036 *ddm = PETSC_NULL; 1037 if(dm->ops->createfielddecompositiondm) { 1038 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1039 } 1040 PetscFunctionReturn(0); 1041 } 1042 1043 #undef __FUNCT__ 1044 #define __FUNCT__ "DMCreateFieldDecomposition" 1045 /*@C 1046 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1047 corresponding to different fields: each IS contains the global indices of the dofs of the 1048 corresponding field. The optional list of DMs define the DM for each subproblem. 1049 Generalizes DMCreateFieldIS(). 1050 1051 Not collective 1052 1053 Input Parameter: 1054 . dm - the DM object 1055 1056 Output Parameters: 1057 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1058 . namelist - The name for each field (or PETSC_NULL if not requested) 1059 . islist - The global indices for each field (or PETSC_NULL if not requested) 1060 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1061 1062 Level: intermediate 1063 1064 Notes: 1065 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1066 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1067 and all of the arrays should be freed with PetscFree(). 
1068 1069 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1070 @*/ 1071 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1072 { 1073 PetscErrorCode ierr; 1074 1075 PetscFunctionBegin; 1076 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1077 if (len) {PetscValidPointer(len,2); *len = 0;} 1078 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1079 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1080 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1081 if (!dm->ops->createfielddecomposition) { 1082 PetscSection section; 1083 PetscInt numFields, f; 1084 1085 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1086 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1087 if (section && numFields && dm->ops->createsubdm) { 1088 *len = numFields; 1089 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1090 for(f = 0; f < numFields; ++f) { 1091 const char *fieldName; 1092 1093 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1094 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1095 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1096 } 1097 } else { 1098 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1099 /* By default there are no DMs associated with subproblems. */ 1100 if(dmlist) *dmlist = PETSC_NULL; 1101 } 1102 } 1103 else { 1104 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1105 } 1106 PetscFunctionReturn(0); 1107 } 1108 1109 #undef __FUNCT__ 1110 #define __FUNCT__ "DMCreateSubDM" 1111 /*@C 1112 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1113 The fields are defined by DMCreateFieldIS(). 
1114 1115 Not collective 1116 1117 Input Parameters: 1118 + dm - the DM object 1119 . numFields - number of fields in this subproblem 1120 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1121 1122 Output Parameters: 1123 . is - The global indices for the subproblem 1124 - dm - The DM for the subproblem 1125 1126 Level: intermediate 1127 1128 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1129 @*/ 1130 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1131 { 1132 PetscErrorCode ierr; 1133 1134 PetscFunctionBegin; 1135 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1136 PetscValidPointer(fields,3); 1137 if (is) {PetscValidPointer(is,4);} 1138 if (subdm) {PetscValidPointer(subdm,5);} 1139 if (dm->ops->createsubdm) { 1140 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1141 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1142 PetscFunctionReturn(0); 1143 } 1144 1145 #undef __FUNCT__ 1146 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1147 /*@C 1148 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1149 1150 Not Collective 1151 1152 Input Parameters: 1153 + dm - the DM object 1154 - name - the name of the subdomain decomposition 1155 1156 Output Parameter: 1157 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1158 1159 Level: advanced 1160 1161 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1162 @*/ 1163 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1164 { 1165 PetscErrorCode ierr; 1166 1167 PetscFunctionBegin; 1168 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1169 PetscValidCharPointer(name,2); 1170 PetscValidPointer(ddm,3); 1171 *ddm = PETSC_NULL; 1172 if(dm->ops->createdomaindecompositiondm) { 1173 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1174 } 1175 PetscFunctionReturn(0); 1176 } 1177 1178 #undef __FUNCT__ 1179 #define __FUNCT__ "DMCreateDomainDecomposition" 1180 /*@C 1181 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1182 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1183 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1184 define a nonoverlapping covering, while outer subdomains can overlap. 1185 The optional list of DMs define the DM for each subproblem. 1186 1187 Not collective 1188 1189 Input Parameter: 1190 . dm - the DM object 1191 1192 Output Parameters: 1193 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1194 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1195 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1196 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1197 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1198 1199 Level: intermediate 1200 1201 Notes: 1202 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1203 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1204 and all of the arrays should be freed with PetscFree(). 1205 1206 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1207 @*/ 1208 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1209 { 1210 PetscErrorCode ierr; 1211 1212 PetscFunctionBegin; 1213 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1214 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1215 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1216 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1217 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1218 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1219 if(dm->ops->createdomaindecomposition) { 1220 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1221 } 1222 PetscFunctionReturn(0); 1223 } 1224 1225 #undef __FUNCT__ 1226 #define __FUNCT__ "DMRefine" 1227 /*@ 1228 DMRefine - Refines a DM object 1229 1230 Collective on DM 1231 1232 Input Parameter: 1233 + dm - the DM object 1234 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1235 1236 Output Parameter: 1237 . 
dmf - the refined DM, or PETSC_NULL 1238 1239 Note: If no refinement was done, the return value is PETSC_NULL 1240 1241 Level: developer 1242 1243 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1244 @*/ 1245 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1246 { 1247 PetscErrorCode ierr; 1248 DMRefineHookLink link; 1249 1250 PetscFunctionBegin; 1251 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1252 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1253 if (*dmf) { 1254 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1255 (*dmf)->ops->initialguess = dm->ops->initialguess; 1256 (*dmf)->ops->function = dm->ops->function; 1257 (*dmf)->ops->functionj = dm->ops->functionj; 1258 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1259 (*dmf)->ops->jacobian = dm->ops->jacobian; 1260 } 1261 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1262 (*dmf)->ctx = dm->ctx; 1263 (*dmf)->leveldown = dm->leveldown; 1264 (*dmf)->levelup = dm->levelup + 1; 1265 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1266 for (link=dm->refinehook; link; link=link->next) { 1267 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1268 } 1269 } 1270 PetscFunctionReturn(0); 1271 } 1272 1273 #undef __FUNCT__ 1274 #define __FUNCT__ "DMRefineHookAdd" 1275 /*@ 1276 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1277 1278 Logically Collective 1279 1280 Input Arguments: 1281 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1282 . refinehook - function to run when setting up a coarser level 1283 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1284 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1285 1286 Calling sequence of refinehook: 1287 $ refinehook(DM coarse,DM fine,void *ctx); 1288 1289 + coarse - coarse level DM 1290 . fine - fine level DM to interpolate problem to 1291 - ctx - optional user-defined function context 1292 1293 Calling sequence for interphook: 1294 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1295 1296 + coarse - coarse level DM 1297 . interp - matrix interpolating a coarse-level solution to the finer grid 1298 . fine - fine level DM to update 1299 - ctx - optional user-defined function context 1300 1301 Level: advanced 1302 1303 Notes: 1304 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1305 1306 If this function is called multiple times, the hooks will be run in the order they are added. 1307 1308 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1309 @*/ 1310 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1311 { 1312 PetscErrorCode ierr; 1313 DMRefineHookLink link,*p; 1314 1315 PetscFunctionBegin; 1316 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1317 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1318 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1319 link->refinehook = refinehook; 1320 link->interphook = interphook; 1321 link->ctx = ctx; 1322 link->next = PETSC_NULL; 1323 *p = link; 1324 PetscFunctionReturn(0); 1325 } 1326 1327 #undef __FUNCT__ 1328 #define __FUNCT__ "DMInterpolate" 1329 /*@ 1330 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1331 1332 Collective if any 
hooks are 1333 1334 Input Arguments: 1335 + coarse - coarser DM to use as a base 1336 . restrct - interpolation matrix, apply using MatInterpolate() 1337 - fine - finer DM to update 1338 1339 Level: developer 1340 1341 .seealso: DMRefineHookAdd(), MatInterpolate() 1342 @*/ 1343 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1344 { 1345 PetscErrorCode ierr; 1346 DMRefineHookLink link; 1347 1348 PetscFunctionBegin; 1349 for (link=fine->refinehook; link; link=link->next) { 1350 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1351 } 1352 PetscFunctionReturn(0); 1353 } 1354 1355 #undef __FUNCT__ 1356 #define __FUNCT__ "DMGetRefineLevel" 1357 /*@ 1358 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1359 1360 Not Collective 1361 1362 Input Parameter: 1363 . dm - the DM object 1364 1365 Output Parameter: 1366 . level - number of refinements 1367 1368 Level: developer 1369 1370 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1371 1372 @*/ 1373 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1374 { 1375 PetscFunctionBegin; 1376 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1377 *level = dm->levelup; 1378 PetscFunctionReturn(0); 1379 } 1380 1381 #undef __FUNCT__ 1382 #define __FUNCT__ "DMGlobalToLocalBegin" 1383 /*@ 1384 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1385 1386 Neighbor-wise Collective on DM 1387 1388 Input Parameters: 1389 + dm - the DM object 1390 . g - the global vector 1391 . 
mode - INSERT_VALUES or ADD_VALUES 1392 - l - the local vector 1393 1394 1395 Level: beginner 1396 1397 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1398 1399 @*/ 1400 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1401 { 1402 PetscSF sf; 1403 PetscErrorCode ierr; 1404 1405 PetscFunctionBegin; 1406 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1407 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1408 if (sf) { 1409 PetscScalar *lArray, *gArray; 1410 1411 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1412 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1413 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1414 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1415 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1416 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1417 } else { 1418 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1419 } 1420 PetscFunctionReturn(0); 1421 } 1422 1423 #undef __FUNCT__ 1424 #define __FUNCT__ "DMGlobalToLocalEnd" 1425 /*@ 1426 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1427 1428 Neighbor-wise Collective on DM 1429 1430 Input Parameters: 1431 + dm - the DM object 1432 . g - the global vector 1433 . 
mode - INSERT_VALUES or ADD_VALUES 1434 - l - the local vector 1435 1436 1437 Level: beginner 1438 1439 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1440 1441 @*/ 1442 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1443 { 1444 PetscSF sf; 1445 PetscErrorCode ierr; 1446 PetscScalar *lArray, *gArray; 1447 1448 PetscFunctionBegin; 1449 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1450 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1451 if (sf) { 1452 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1453 1454 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1455 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1456 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1457 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1458 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1459 } else { 1460 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1461 } 1462 PetscFunctionReturn(0); 1463 } 1464 1465 #undef __FUNCT__ 1466 #define __FUNCT__ "DMLocalToGlobalBegin" 1467 /*@ 1468 DMLocalToGlobalBegin - updates global vectors from local vectors 1469 1470 Neighbor-wise Collective on DM 1471 1472 Input Parameters: 1473 + dm - the DM object 1474 . l - the local vector 1475 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1476 base point. 1477 - - the global vector 1478 1479 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1480 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1481 global array to the final global array with VecAXPY(). 1482 1483 Level: beginner 1484 1485 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1486 1487 @*/ 1488 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1489 { 1490 PetscSF sf; 1491 PetscErrorCode ierr; 1492 1493 PetscFunctionBegin; 1494 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1495 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1496 if (sf) { 1497 MPI_Op op; 1498 PetscScalar *lArray, *gArray; 1499 1500 switch(mode) { 1501 case INSERT_VALUES: 1502 case INSERT_ALL_VALUES: 1503 #if defined(PETSC_HAVE_MPI_REPLACE) 1504 op = MPI_REPLACE; break; 1505 #else 1506 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1507 #endif 1508 case ADD_VALUES: 1509 case ADD_ALL_VALUES: 1510 op = MPI_SUM; break; 1511 default: 1512 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1513 } 1514 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1515 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1516 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1517 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1518 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1519 } else { 1520 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1521 } 1522 PetscFunctionReturn(0); 1523 } 1524 1525 #undef __FUNCT__ 1526 #define __FUNCT__ "DMLocalToGlobalEnd" 1527 /*@ 1528 DMLocalToGlobalEnd - updates global vectors from local vectors 1529 1530 Neighbor-wise Collective on DM 1531 1532 Input Parameters: 1533 + dm - the DM object 1534 . l - the local vector 1535 . mode - INSERT_VALUES or ADD_VALUES 1536 - g - the global vector 1537 1538 1539 Level: beginner 1540 1541 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1542 1543 @*/ 1544 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1545 { 1546 PetscSF sf; 1547 PetscErrorCode ierr; 1548 1549 PetscFunctionBegin; 1550 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1551 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1552 if (sf) { 1553 MPI_Op op; 1554 PetscScalar *lArray, *gArray; 1555 1556 switch(mode) { 1557 case INSERT_VALUES: 1558 case INSERT_ALL_VALUES: 1559 #if defined(PETSC_HAVE_MPI_REPLACE) 1560 op = MPI_REPLACE; break; 1561 #else 1562 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1563 #endif 1564 case ADD_VALUES: 1565 case ADD_ALL_VALUES: 1566 op = MPI_SUM; break; 1567 default: 1568 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1569 } 1570 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1571 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1572 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1573 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1574 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1575 } else { 1576 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1577 } 1578 PetscFunctionReturn(0); 1579 } 1580 1581 #undef __FUNCT__ 1582 #define __FUNCT__ "DMComputeJacobianDefault" 1583 /*@ 1584 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1585 1586 Collective on DM 1587 1588 Input Parameter: 1589 + dm - the DM object 1590 . x - location to compute Jacobian at; may be ignored for linear problems 1591 . A - matrix that defines the operator for the linear solve 1592 - B - the matrix used to construct the preconditioner 1593 1594 Level: developer 1595 1596 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1597 DMSetFunction() 1598 1599 @*/ 1600 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1601 { 1602 PetscErrorCode ierr; 1603 1604 PetscFunctionBegin; 1605 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1606 *stflag = SAME_NONZERO_PATTERN; 1607 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1608 if (A != B) { 1609 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1610 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1611 } 1612 PetscFunctionReturn(0); 1613 } 1614 1615 #undef __FUNCT__ 1616 #define __FUNCT__ "DMCoarsen" 1617 /*@ 1618 DMCoarsen - Coarsens a DM object 1619 1620 Collective on DM 1621 1622 Input Parameter: 1623 + dm - the DM object 1624 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1625 1626 Output Parameter: 1627 . 
dmc - the coarsened DM 1628 1629 Level: developer 1630 1631 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1632 1633 @*/ 1634 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1635 { 1636 PetscErrorCode ierr; 1637 DMCoarsenHookLink link; 1638 1639 PetscFunctionBegin; 1640 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1641 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1642 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1643 (*dmc)->ops->initialguess = dm->ops->initialguess; 1644 (*dmc)->ops->function = dm->ops->function; 1645 (*dmc)->ops->functionj = dm->ops->functionj; 1646 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1647 (*dmc)->ops->jacobian = dm->ops->jacobian; 1648 } 1649 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1650 (*dmc)->ctx = dm->ctx; 1651 (*dmc)->levelup = dm->levelup; 1652 (*dmc)->leveldown = dm->leveldown + 1; 1653 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1654 for (link=dm->coarsenhook; link; link=link->next) { 1655 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1656 } 1657 PetscFunctionReturn(0); 1658 } 1659 1660 #undef __FUNCT__ 1661 #define __FUNCT__ "DMCoarsenHookAdd" 1662 /*@ 1663 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1664 1665 Logically Collective 1666 1667 Input Arguments: 1668 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1669 . coarsenhook - function to run when setting up a coarser level 1670 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1671 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1672 1673 Calling sequence of coarsenhook: 1674 $ coarsenhook(DM fine,DM coarse,void *ctx); 1675 1676 + fine - fine level DM 1677 . 
coarse - coarse level DM to restrict problem to 1678 - ctx - optional user-defined function context 1679 1680 Calling sequence for restricthook: 1681 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1682 1683 + fine - fine level DM 1684 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1685 . rscale - scaling vector for restriction 1686 . inject - matrix restricting by injection 1687 . coarse - coarse level DM to update 1688 - ctx - optional user-defined function context 1689 1690 Level: advanced 1691 1692 Notes: 1693 This function is only needed if auxiliary data needs to be set up on coarse grids. 1694 1695 If this function is called multiple times, the hooks will be run in the order they are added. 1696 1697 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1698 extract the finest level information from its context (instead of from the SNES). 1699 1700 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1701 @*/ 1702 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1703 { 1704 PetscErrorCode ierr; 1705 DMCoarsenHookLink link,*p; 1706 1707 PetscFunctionBegin; 1708 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1709 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1710 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1711 link->coarsenhook = coarsenhook; 1712 link->restricthook = restricthook; 1713 link->ctx = ctx; 1714 link->next = PETSC_NULL; 1715 *p = link; 1716 PetscFunctionReturn(0); 1717 } 1718 1719 #undef __FUNCT__ 1720 #define __FUNCT__ "DMRestrict" 1721 /*@ 1722 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1723 1724 Collective 
if any hooks are 1725 1726 Input Arguments: 1727 + fine - finer DM to use as a base 1728 . restrct - restriction matrix, apply using MatRestrict() 1729 . inject - injection matrix, also use MatRestrict() 1730 - coarse - coarer DM to update 1731 1732 Level: developer 1733 1734 .seealso: DMCoarsenHookAdd(), MatRestrict() 1735 @*/ 1736 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1737 { 1738 PetscErrorCode ierr; 1739 DMCoarsenHookLink link; 1740 1741 PetscFunctionBegin; 1742 for (link=fine->coarsenhook; link; link=link->next) { 1743 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1744 } 1745 PetscFunctionReturn(0); 1746 } 1747 1748 #undef __FUNCT__ 1749 #define __FUNCT__ "DMGetCoarsenLevel" 1750 /*@ 1751 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1752 1753 Not Collective 1754 1755 Input Parameter: 1756 . dm - the DM object 1757 1758 Output Parameter: 1759 . level - number of coarsenings 1760 1761 Level: developer 1762 1763 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1764 1765 @*/ 1766 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1767 { 1768 PetscFunctionBegin; 1769 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1770 *level = dm->leveldown; 1771 PetscFunctionReturn(0); 1772 } 1773 1774 1775 1776 #undef __FUNCT__ 1777 #define __FUNCT__ "DMRefineHierarchy" 1778 /*@C 1779 DMRefineHierarchy - Refines a DM object, all levels at once 1780 1781 Collective on DM 1782 1783 Input Parameter: 1784 + dm - the DM object 1785 - nlevels - the number of levels of refinement 1786 1787 Output Parameter: 1788 . 
dmf - the refined DM hierarchy 1789 1790 Level: developer 1791 1792 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1793 1794 @*/ 1795 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1796 { 1797 PetscErrorCode ierr; 1798 1799 PetscFunctionBegin; 1800 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1801 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1802 if (nlevels == 0) PetscFunctionReturn(0); 1803 if (dm->ops->refinehierarchy) { 1804 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1805 } else if (dm->ops->refine) { 1806 PetscInt i; 1807 1808 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1809 for (i=1; i<nlevels; i++) { 1810 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1811 } 1812 } else { 1813 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1814 } 1815 PetscFunctionReturn(0); 1816 } 1817 1818 #undef __FUNCT__ 1819 #define __FUNCT__ "DMCoarsenHierarchy" 1820 /*@C 1821 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1822 1823 Collective on DM 1824 1825 Input Parameter: 1826 + dm - the DM object 1827 - nlevels - the number of levels of coarsening 1828 1829 Output Parameter: 1830 . 
dmc - the coarsened DM hierarchy 1831 1832 Level: developer 1833 1834 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1835 1836 @*/ 1837 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 1838 { 1839 PetscErrorCode ierr; 1840 1841 PetscFunctionBegin; 1842 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1843 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1844 if (nlevels == 0) PetscFunctionReturn(0); 1845 PetscValidPointer(dmc,3); 1846 if (dm->ops->coarsenhierarchy) { 1847 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 1848 } else if (dm->ops->coarsen) { 1849 PetscInt i; 1850 1851 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 1852 for (i=1; i<nlevels; i++) { 1853 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 1854 } 1855 } else { 1856 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 1857 } 1858 PetscFunctionReturn(0); 1859 } 1860 1861 #undef __FUNCT__ 1862 #define __FUNCT__ "DMCreateAggregates" 1863 /*@ 1864 DMCreateAggregates - Gets the aggregates that map between 1865 grids associated with two DMs. 1866 1867 Collective on DM 1868 1869 Input Parameters: 1870 + dmc - the coarse grid DM 1871 - dmf - the fine grid DM 1872 1873 Output Parameters: 1874 . 
rest - the restriction matrix (transpose of the projection matrix) 1875 1876 Level: intermediate 1877 1878 .keywords: interpolation, restriction, multigrid 1879 1880 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 1881 @*/ 1882 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 1883 { 1884 PetscErrorCode ierr; 1885 1886 PetscFunctionBegin; 1887 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 1888 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 1889 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 1890 PetscFunctionReturn(0); 1891 } 1892 1893 #undef __FUNCT__ 1894 #define __FUNCT__ "DMSetApplicationContextDestroy" 1895 /*@C 1896 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 1897 1898 Not Collective 1899 1900 Input Parameters: 1901 + dm - the DM object 1902 - destroy - the destroy function 1903 1904 Level: intermediate 1905 1906 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1907 1908 @*/ 1909 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 1910 { 1911 PetscFunctionBegin; 1912 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1913 dm->ctxdestroy = destroy; 1914 PetscFunctionReturn(0); 1915 } 1916 1917 #undef __FUNCT__ 1918 #define __FUNCT__ "DMSetApplicationContext" 1919 /*@ 1920 DMSetApplicationContext - Set a user context into a DM object 1921 1922 Not Collective 1923 1924 Input Parameters: 1925 + dm - the DM object 1926 - ctx - the user context 1927 1928 Level: intermediate 1929 1930 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1931 1932 @*/ 1933 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 1934 { 1935 PetscFunctionBegin; 1936 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1937 dm->ctx = ctx; 1938 PetscFunctionReturn(0); 1939 } 1940 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really the address of the user's pointer (void**), passed as void* for convenience */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
    DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the initial guess

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
    DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

    Level: intermediate

    Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite
differences if no Jacobian computation routine is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  if (f) {
    /* functionj is what finite-difference Jacobians call; only overwrite it when setting, not when canceling */
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVariableBounds"
/*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2051 2052 Logically Collective on DM 2053 2054 Input Parameter: 2055 + dm - the DM object 2056 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2057 2058 Level: intermediate 2059 2060 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2061 DMSetJacobian() 2062 2063 @*/ 2064 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2065 { 2066 PetscFunctionBegin; 2067 dm->ops->computevariablebounds = f; 2068 PetscFunctionReturn(0); 2069 } 2070 2071 #undef __FUNCT__ 2072 #define __FUNCT__ "DMHasVariableBounds" 2073 /*@ 2074 DMHasVariableBounds - does the DM object have a variable bounds function? 2075 2076 Not Collective 2077 2078 Input Parameter: 2079 . dm - the DM object to destroy 2080 2081 Output Parameter: 2082 . flg - PETSC_TRUE if the variable bounds function exists 2083 2084 Level: developer 2085 2086 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2087 2088 @*/ 2089 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2090 { 2091 PetscFunctionBegin; 2092 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2093 PetscFunctionReturn(0); 2094 } 2095 2096 #undef __FUNCT__ 2097 #define __FUNCT__ "DMComputeVariableBounds" 2098 /*@C 2099 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2100 2101 Logically Collective on DM 2102 2103 Input Parameters: 2104 + dm - the DM object to destroy 2105 - x - current solution at which the bounds are computed 2106 2107 Output parameters: 2108 + xl - lower bound 2109 - xu - upper bound 2110 2111 Level: intermediate 2112 2113 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2114 DMSetFunction(), DMSetVariableBounds() 2115 2116 @*/ 2117 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2118 { 2119 PetscErrorCode ierr; 2120 PetscFunctionBegin; 2121 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2122 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2123 if(dm->ops->computevariablebounds) { 2124 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2125 } 2126 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2127 PetscFunctionReturn(0); 2128 } 2129 2130 #undef __FUNCT__ 2131 #define __FUNCT__ "DMComputeInitialGuess" 2132 /*@ 2133 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2134 2135 Collective on DM 2136 2137 Input Parameter: 2138 + dm - the DM object to destroy 2139 - x - the vector to hold the initial guess values 2140 2141 Level: developer 2142 2143 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2144 2145 @*/ 2146 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2147 { 2148 PetscErrorCode ierr; 2149 PetscFunctionBegin; 2150 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2151 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2152 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2153 PetscFunctionReturn(0); 2154 } 2155 2156 #undef __FUNCT__ 2157 #define __FUNCT__ "DMHasInitialGuess" 2158 /*@ 2159 
    DMHasInitialGuess - does the DM object have an initial guess function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
    DMHasFunction - does the DM object have a function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasFunction(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
    DMHasJacobian - does the DM object have a matrix function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.
flg - PETSC_TRUE if function exists 2220 2221 Level: developer 2222 2223 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2224 2225 @*/ 2226 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2227 { 2228 PetscFunctionBegin; 2229 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2230 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2231 PetscFunctionReturn(0); 2232 } 2233 2234 #undef __FUNCT__ 2235 #define __FUNCT__ "DMHasColoring" 2236 /*@ 2237 DMHasColoring - does the DM object have a method of providing a coloring? 2238 2239 Not Collective 2240 2241 Input Parameter: 2242 . dm - the DM object 2243 2244 Output Parameter: 2245 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2246 2247 Level: developer 2248 2249 .seealso DMHasFunction(), DMCreateColoring() 2250 2251 @*/ 2252 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2253 { 2254 PetscFunctionBegin; 2255 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2256 PetscFunctionReturn(0); 2257 } 2258 2259 #undef __FUNCT__ 2260 #define __FUNCT__ "DMSetVec" 2261 /*@C 2262 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2263 2264 Collective on DM 2265 2266 Input Parameter: 2267 + dm - the DM object 2268 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2269 2270 Level: developer 2271 2272 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2273 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2274 2275 @*/ 2276 PetscErrorCode DMSetVec(DM dm,Vec x) 2277 { 2278 PetscErrorCode ierr; 2279 PetscFunctionBegin; 2280 if (x) { 2281 if (!dm->x) { 2282 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2283 } 2284 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2285 } 2286 else if(dm->x) { 2287 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2288 } 2289 PetscFunctionReturn(0); 2290 } 2291 2292 2293 #undef __FUNCT__ 2294 #define __FUNCT__ "DMComputeFunction" 2295 /*@ 2296 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2297 2298 Collective on DM 2299 2300 Input Parameter: 2301 + dm - the DM object to destroy 2302 . x - the location where the function is evaluationed, may be ignored for linear problems 2303 - b - the vector to hold the right hand side entries 2304 2305 Level: developer 2306 2307 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2308 DMSetJacobian() 2309 2310 @*/ 2311 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2312 { 2313 PetscErrorCode ierr; 2314 PetscFunctionBegin; 2315 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2316 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2317 PetscStackPush("DM user function"); 2318 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2319 PetscStackPop; 2320 PetscFunctionReturn(0); 2321 } 2322 2323 2324 2325 #undef __FUNCT__ 2326 #define __FUNCT__ "DMComputeJacobian" 2327 /*@ 2328 DMComputeJacobian - compute the matrix entries for the solver 2329 2330 Collective on DM 2331 2332 Input Parameter: 2333 + dm - the DM object 2334 . 
 x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* no user Jacobian: lazily build a finite-difference coloring Jacobian on first call
       and install DMComputeJacobianDefault so subsequent calls reuse it */
    ISColoring coloring;
    MatFDColoring fd;
    const MatType mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    /* functionj was captured by DMSetFunction(); it supplies the residual for differencing */
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x;  /* fall back to the state stored by DMSetVec()/a previous call */
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e.
nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    /* B was assembled by the callback; A may still need assembly when it is a separate operator */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* registry of available DM implementations, populated by DMRegisterAll()/DMRegister() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, const DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);  /* already the requested type; nothing to do */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down any previous implementation before the new constructor runs */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, const DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;  /* returned string is owned by the DM; do not free */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, const DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but not consulted below — presumably
     intended for a same-type fast path; confirm before relying on them */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, const DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    /* a temporary DM of the target type is created just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Level: advanced
@*/
PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char           fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the registry stores "path:name" so dynamic loading can locate the symbol */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2592 2593 Not Collective 2594 2595 Level: advanced 2596 2597 .keywords: DM, register, destroy 2598 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic() 2599 @*/ 2600 PetscErrorCode DMRegisterDestroy(void) 2601 { 2602 PetscErrorCode ierr; 2603 2604 PetscFunctionBegin; 2605 ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr); 2606 DMRegisterAllCalled = PETSC_FALSE; 2607 PetscFunctionReturn(0); 2608 } 2609 2610 #if defined(PETSC_HAVE_MATLAB_ENGINE) 2611 #include <mex.h> 2612 2613 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext; 2614 2615 #undef __FUNCT__ 2616 #define __FUNCT__ "DMComputeFunction_Matlab" 2617 /* 2618 DMComputeFunction_Matlab - Calls the function that has been set with 2619 DMSetFunctionMatlab(). 2620 2621 For linear problems x is null 2622 2623 .seealso: DMSetFunction(), DMGetFunction() 2624 */ 2625 PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y) 2626 { 2627 PetscErrorCode ierr; 2628 DMMatlabContext *sctx; 2629 int nlhs = 1,nrhs = 4; 2630 mxArray *plhs[1],*prhs[4]; 2631 long long int lx = 0,ly = 0,ls = 0; 2632 2633 PetscFunctionBegin; 2634 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2635 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 2636 PetscCheckSameComm(dm,1,y,3); 2637 2638 /* call Matlab function in ctx with arguments x and y */ 2639 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2640 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2641 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2642 ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr); 2643 prhs[0] = mxCreateDoubleScalar((double)ls); 2644 prhs[1] = mxCreateDoubleScalar((double)lx); 2645 prhs[2] = mxCreateDoubleScalar((double)ly); 2646 prhs[3] = mxCreateString(sctx->funcname); 2647 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr); 2648 ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr); 2649 mxDestroyArray(prhs[0]); 2650 mxDestroyArray(prhs[1]); 2651 mxDestroyArray(prhs[2]); 2652 mxDestroyArray(prhs[3]); 
2653 mxDestroyArray(plhs[0]); 2654 PetscFunctionReturn(0); 2655 } 2656 2657 2658 #undef __FUNCT__ 2659 #define __FUNCT__ "DMSetFunctionMatlab" 2660 /* 2661 DMSetFunctionMatlab - Sets the function evaluation routine 2662 2663 */ 2664 PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func) 2665 { 2666 PetscErrorCode ierr; 2667 DMMatlabContext *sctx; 2668 2669 PetscFunctionBegin; 2670 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2671 /* currently sctx is memory bleed */ 2672 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2673 if (!sctx) { 2674 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2675 } 2676 ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr); 2677 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2678 ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr); 2679 PetscFunctionReturn(0); 2680 } 2681 2682 #undef __FUNCT__ 2683 #define __FUNCT__ "DMComputeJacobian_Matlab" 2684 /* 2685 DMComputeJacobian_Matlab - Calls the function that has been set with 2686 DMSetJacobianMatlab(). 
2687 2688 For linear problems x is null 2689 2690 .seealso: DMSetFunction(), DMGetFunction() 2691 */ 2692 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2693 { 2694 PetscErrorCode ierr; 2695 DMMatlabContext *sctx; 2696 int nlhs = 2,nrhs = 5; 2697 mxArray *plhs[2],*prhs[5]; 2698 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2699 2700 PetscFunctionBegin; 2701 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2702 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2703 2704 /* call MATLAB function in ctx with arguments x, A, and B */ 2705 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2706 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2707 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2708 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2709 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2710 prhs[0] = mxCreateDoubleScalar((double)ls); 2711 prhs[1] = mxCreateDoubleScalar((double)lx); 2712 prhs[2] = mxCreateDoubleScalar((double)lA); 2713 prhs[3] = mxCreateDoubleScalar((double)lB); 2714 prhs[4] = mxCreateString(sctx->jacname); 2715 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2716 *str = (MatStructure) mxGetScalar(plhs[0]); 2717 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2718 mxDestroyArray(prhs[0]); 2719 mxDestroyArray(prhs[1]); 2720 mxDestroyArray(prhs[2]); 2721 mxDestroyArray(prhs[3]); 2722 mxDestroyArray(prhs[4]); 2723 mxDestroyArray(plhs[0]); 2724 mxDestroyArray(plhs[1]); 2725 PetscFunctionReturn(0); 2726 } 2727 2728 2729 #undef __FUNCT__ 2730 #define __FUNCT__ "DMSetJacobianMatlab" 2731 /* 2732 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2733 2734 */ 2735 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2736 { 2737 PetscErrorCode ierr; 2738 DMMatlabContext *sctx; 2739 2740 PetscFunctionBegin; 2741 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2742 /* currently sctx is memory bleed */ 2743 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2744 if (!sctx) { 2745 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2746 } 2747 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2748 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2749 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2750 PetscFunctionReturn(0); 2751 } 2752 #endif 2753 2754 #undef __FUNCT__ 2755 #define __FUNCT__ "DMLoad" 2756 /*@C 2757 DMLoad - Loads a DM that has been stored in binary or HDF5 format 2758 with DMView(). 2759 2760 Collective on PetscViewer 2761 2762 Input Parameters: 2763 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2764 some related function before a call to DMLoad(). 2765 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2766 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2767 2768 Level: intermediate 2769 2770 Notes: 2771 Defaults to the DM DA. 2772 2773 Notes for advanced users: 2774 Most users should not need to know the details of the binary storage 2775 format, since DMLoad() and DMView() completely hide these details. 2776 But for anyone who's interested, the standard binary matrix storage 2777 format is 2778 .vb 2779 has not yet been determined 2780 .ve 2781 2782 In addition, PETSc automatically does the byte swapping for 2783 machines that store the bytes reversed, e.g. DEC alpha, freebsd, 2784 linux, Windows and the paragon; thus if you write your own binary 2785 read/write routines you have to swap the bytes; see PetscBinaryRead() 2786 and PetscBinaryWrite() to see how this may be done. 
2787 2788 Concepts: vector^loading from file 2789 2790 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2791 @*/ 2792 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2793 { 2794 PetscErrorCode ierr; 2795 2796 PetscFunctionBegin; 2797 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2798 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2799 2800 if (!((PetscObject)newdm)->type_name) { 2801 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2802 } 2803 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2804 PetscFunctionReturn(0); 2805 } 2806 2807 /******************************** FEM Support **********************************/ 2808 2809 #undef __FUNCT__ 2810 #define __FUNCT__ "DMPrintCellVector" 2811 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2812 PetscInt f; 2813 PetscErrorCode ierr; 2814 2815 PetscFunctionBegin; 2816 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2817 for(f = 0; f < len; ++f) { 2818 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2819 } 2820 PetscFunctionReturn(0); 2821 } 2822 2823 #undef __FUNCT__ 2824 #define __FUNCT__ "DMPrintCellMatrix" 2825 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2826 PetscInt f, g; 2827 PetscErrorCode ierr; 2828 2829 PetscFunctionBegin; 2830 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2831 for(f = 0; f < rows; ++f) { 2832 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2833 for(g = 0; g < cols; ++g) { 2834 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2835 } 2836 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2837 } 2838 PetscFunctionReturn(0); 2839 } 2840 2841 #undef __FUNCT__ 2842 #define __FUNCT__ "DMGetLocalFunction" 2843 /*@C 2844 DMGetLocalFunction - Get the local residual function 
from this DM 2845 2846 Not collective 2847 2848 Input Parameter: 2849 . dm - The DM 2850 2851 Output Parameter: 2852 . lf - The local residual function 2853 2854 Calling sequence of lf: 2855 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2856 2857 + snes - the SNES context 2858 . x - local vector with the state at which to evaluate residual 2859 . f - local vector to put residual in 2860 - ctx - optional user-defined function context 2861 2862 Level: intermediate 2863 2864 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2865 @*/ 2866 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 2867 { 2868 PetscFunctionBegin; 2869 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2870 if (lf) *lf = dm->lf; 2871 PetscFunctionReturn(0); 2872 } 2873 2874 #undef __FUNCT__ 2875 #define __FUNCT__ "DMSetLocalFunction" 2876 /*@C 2877 DMSetLocalFunction - Set the local residual function from this DM 2878 2879 Not collective 2880 2881 Input Parameters: 2882 + dm - The DM 2883 - lf - The local residual function 2884 2885 Calling sequence of lf: 2886 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2887 2888 + snes - the SNES context 2889 . x - local vector with the state at which to evaluate residual 2890 . f - local vector to put residual in 2891 - ctx - optional user-defined function context 2892 2893 Level: intermediate 2894 2895 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2896 @*/ 2897 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 2898 { 2899 PetscFunctionBegin; 2900 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2901 dm->lf = lf; 2902 PetscFunctionReturn(0); 2903 } 2904 2905 #undef __FUNCT__ 2906 #define __FUNCT__ "DMGetLocalJacobian" 2907 /*@C 2908 DMGetLocalJacobian - Get the local Jacobian function from this DM 2909 2910 Not collective 2911 2912 Input Parameter: 2913 . dm - The DM 2914 2915 Output Parameter: 2916 . 
lj - The local Jacobian function 2917 2918 Calling sequence of lj: 2919 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 2920 2921 + snes - the SNES context 2922 . x - local vector with the state at which to evaluate residual 2923 . J - matrix to put Jacobian in 2924 . M - matrix to use for defining Jacobian preconditioner 2925 - ctx - optional user-defined function context 2926 2927 Level: intermediate 2928 2929 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 2930 @*/ 2931 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 2932 { 2933 PetscFunctionBegin; 2934 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2935 if (lj) *lj = dm->lj; 2936 PetscFunctionReturn(0); 2937 } 2938 2939 #undef __FUNCT__ 2940 #define __FUNCT__ "DMSetLocalJacobian" 2941 /*@C 2942 DMSetLocalJacobian - Set the local Jacobian function from this DM 2943 2944 Not collective 2945 2946 Input Parameters: 2947 + dm - The DM 2948 - lj - The local Jacobian function 2949 2950 Calling sequence of lj: 2951 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 2952 2953 + snes - the SNES context 2954 . x - local vector with the state at which to evaluate residual 2955 . J - matrix to put Jacobian in 2956 . M - matrix to use for defining Jacobian preconditioner 2957 - ctx - optional user-defined function context 2958 2959 Level: intermediate 2960 2961 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 2962 @*/ 2963 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 2964 { 2965 PetscFunctionBegin; 2966 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2967 dm->lj = lj; 2968 PetscFunctionReturn(0); 2969 } 2970 2971 #undef __FUNCT__ 2972 #define __FUNCT__ "DMGetDefaultSection" 2973 /*@ 2974 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 2975 2976 Input Parameter: 2977 . dm - The DM 2978 2979 Output Parameter: 2980 . 
section - The PetscSection 2981 2982 Level: intermediate 2983 2984 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 2985 2986 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 2987 @*/ 2988 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 2989 PetscFunctionBegin; 2990 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2991 PetscValidPointer(section, 2); 2992 *section = dm->defaultSection; 2993 PetscFunctionReturn(0); 2994 } 2995 2996 #undef __FUNCT__ 2997 #define __FUNCT__ "DMSetDefaultSection" 2998 /*@ 2999 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 3000 3001 Input Parameters: 3002 + dm - The DM 3003 - section - The PetscSection 3004 3005 Level: intermediate 3006 3007 Note: Any existing Section will be destroyed 3008 3009 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3010 @*/ 3011 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3012 PetscErrorCode ierr; 3013 3014 PetscFunctionBegin; 3015 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3016 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3017 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3018 dm->defaultSection = section; 3019 PetscFunctionReturn(0); 3020 } 3021 3022 #undef __FUNCT__ 3023 #define __FUNCT__ "DMGetDefaultGlobalSection" 3024 /*@ 3025 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3026 3027 Input Parameter: 3028 . dm - The DM 3029 3030 Output Parameter: 3031 . section - The PetscSection 3032 3033 Level: intermediate 3034 3035 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
3036 3037 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3038 @*/ 3039 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3040 PetscErrorCode ierr; 3041 3042 PetscFunctionBegin; 3043 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3044 PetscValidPointer(section, 2); 3045 if (!dm->defaultGlobalSection) { 3046 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, &dm->defaultGlobalSection);CHKERRQ(ierr); 3047 } 3048 *section = dm->defaultGlobalSection; 3049 PetscFunctionReturn(0); 3050 } 3051 3052 #undef __FUNCT__ 3053 #define __FUNCT__ "DMGetDefaultSF" 3054 /*@ 3055 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3056 it is created from the default PetscSection layouts in the DM. 3057 3058 Input Parameter: 3059 . dm - The DM 3060 3061 Output Parameter: 3062 . sf - The PetscSF 3063 3064 Level: intermediate 3065 3066 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
3067 3068 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3069 @*/ 3070 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3071 PetscInt nroots; 3072 PetscErrorCode ierr; 3073 3074 PetscFunctionBegin; 3075 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3076 PetscValidPointer(sf, 2); 3077 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3078 if (nroots < 0) { 3079 PetscSection section, gSection; 3080 3081 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3082 if (section) { 3083 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3084 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3085 } else { 3086 *sf = PETSC_NULL; 3087 PetscFunctionReturn(0); 3088 } 3089 } 3090 *sf = dm->defaultSF; 3091 PetscFunctionReturn(0); 3092 } 3093 3094 #undef __FUNCT__ 3095 #define __FUNCT__ "DMSetDefaultSF" 3096 /*@ 3097 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3098 3099 Input Parameters: 3100 + dm - The DM 3101 - sf - The PetscSF 3102 3103 Level: intermediate 3104 3105 Note: Any previous SF is destroyed 3106 3107 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3108 @*/ 3109 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3110 PetscErrorCode ierr; 3111 3112 PetscFunctionBegin; 3113 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3114 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3115 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3116 dm->defaultSF = sf; 3117 PetscFunctionReturn(0); 3118 } 3119 3120 #undef __FUNCT__ 3121 #define __FUNCT__ "DMCreateDefaultSF" 3122 /*@C 3123 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3124 describing the data layout. 3125 3126 Input Parameters: 3127 + dm - The DM 3128 . 
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the locally-owned (root) dofs so we can translate a
     global offset into an (owner rank, local index) pair below. */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* Count leaves: every unconstrained dof of every point in the chart.
     A negative global dof encodes a remotely-owned point as -(dof+1). */
  for(p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt dof, cdof;

    ierr = PetscSectionGetDof(globalSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &cdof);CHKERRQ(ierr);
    nleaves += dof < 0 ? -(dof+1)-cdof : dof-cdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  for(p = pStart, l = 0; p < pEnd; ++p) {
    PetscInt *cind;
    PetscInt  dof, gdof, cdof, dim, off, goff, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    dim  = dof-cdof;
    /* Record the local indices of the unconstrained dofs, skipping constrained ones */
    for(d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: goff encodes the true global offset as -(goff+1);
         find the owning rank by a linear search over the ownership ranges. */
      for(d = 0; d < dim; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        for(r = 0; r < size; ++r) {
          if ((offset >= ranges[r]) && (offset < ranges[r+1])) break;
        }
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: the root is on this rank at the local layout offset */
      for(d = 0; d < dim; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER): do not free them here */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}