1 #include <petscsnes.h> 2 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = PETSC_NULL; 35 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 36 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 39 #endif 40 41 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 42 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 43 44 45 v->ltogmap = PETSC_NULL; 46 v->ltogmapb = PETSC_NULL; 47 v->bs = 1; 48 v->coloringtype = IS_COLORING_GLOBAL; 49 v->lf = PETSC_NULL; 50 v->lj = PETSC_NULL; 51 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 52 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 53 v->defaultSection = PETSC_NULL; 54 v->defaultGlobalSection = PETSC_NULL; 55 { 56 PetscInt i; 57 for (i = 0; i < 10; ++i) { 58 v->nullspaceConstructors[i] = PETSC_NULL; 59 } 60 } 61 v->numFields = 0; 62 v->fields = PETSC_NULL; 63 64 *dm = v; 65 PetscFunctionReturn(0); 66 } 67 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "DMSetVecType" 71 /*@C 72 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 73 74 Logically 
Collective on DMDA 75 76 Input Parameter: 77 + da - initial distributed array 78 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 79 80 Options Database: 81 . -dm_vec_type ctype 82 83 Level: intermediate 84 85 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 86 @*/ 87 PetscErrorCode DMSetVecType(DM da,const VecType ctype) 88 { 89 PetscErrorCode ierr; 90 91 PetscFunctionBegin; 92 PetscValidHeaderSpecific(da,DM_CLASSID,1); 93 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 94 ierr = PetscStrallocpy(ctype,&da->vectype);CHKERRQ(ierr); 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "DMSetMatType" 100 /*@C 101 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 102 103 Logically Collective on DM 104 105 Input Parameter: 106 + dm - the DM context 107 . ctype - the matrix type 108 109 Options Database: 110 . -dm_mat_type ctype 111 112 Level: intermediate 113 114 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 115 @*/ 116 PetscErrorCode DMSetMatType(DM dm,const MatType ctype) 117 { 118 PetscErrorCode ierr; 119 PetscFunctionBegin; 120 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 121 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 122 ierr = PetscStrallocpy(ctype,&dm->mattype);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 126 #undef __FUNCT__ 127 #define __FUNCT__ "DMSetOptionsPrefix" 128 /*@C 129 DMSetOptionsPrefix - Sets the prefix used for searching for all 130 DMDA options in the database. 131 132 Logically Collective on DMDA 133 134 Input Parameter: 135 + da - the DMDA context 136 - prefix - the prefix to prepend to all option names 137 138 Notes: 139 A hyphen (-) must NOT be given at the beginning of the prefix name. 140 The first character of all runtime options is AUTOMATICALLY the hyphen. 
141 142 Level: advanced 143 144 .keywords: DMDA, set, options, prefix, database 145 146 .seealso: DMSetFromOptions() 147 @*/ 148 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 149 { 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 154 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 155 PetscFunctionReturn(0); 156 } 157 158 #undef __FUNCT__ 159 #define __FUNCT__ "DMDestroy" 160 /*@ 161 DMDestroy - Destroys a vector packer or DMDA. 162 163 Collective on DM 164 165 Input Parameter: 166 . dm - the DM object to destroy 167 168 Level: developer 169 170 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 171 172 @*/ 173 PetscErrorCode DMDestroy(DM *dm) 174 { 175 PetscInt i, cnt = 0, f; 176 DMNamedVecLink nlink,nnext; 177 PetscErrorCode ierr; 178 179 PetscFunctionBegin; 180 if (!*dm) PetscFunctionReturn(0); 181 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 182 183 /* count all the circular references of DM and its contained Vecs */ 184 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 185 if ((*dm)->localin[i]) {cnt++;} 186 if ((*dm)->globalin[i]) {cnt++;} 187 } 188 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 189 if ((*dm)->x) { 190 PetscObject obj; 191 ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr); 192 if (obj == (PetscObject)*dm) cnt++; 193 } 194 195 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 196 /* 197 Need this test because the dm references the vectors that 198 reference the dm, so destroying the dm calls destroy on the 199 vectors that cause another destroy on the dm 200 */ 201 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 202 ((PetscObject) (*dm))->refct = 0; 203 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 204 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with 
DMGetLocalVector()"); 205 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 206 } 207 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 208 nnext = nlink->next; 209 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 210 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 211 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 212 ierr = PetscFree(nlink);CHKERRQ(ierr); 213 } 214 (*dm)->namedglobal = PETSC_NULL; 215 216 /* Destroy the list of hooks */ 217 { 218 DMCoarsenHookLink link,next; 219 for (link=(*dm)->coarsenhook; link; link=next) { 220 next = link->next; 221 ierr = PetscFree(link);CHKERRQ(ierr); 222 } 223 (*dm)->coarsenhook = PETSC_NULL; 224 } 225 { 226 DMRefineHookLink link,next; 227 for (link=(*dm)->refinehook; link; link=next) { 228 next = link->next; 229 ierr = PetscFree(link);CHKERRQ(ierr); 230 } 231 (*dm)->refinehook = PETSC_NULL; 232 } 233 /* Destroy the work arrays */ 234 { 235 DMWorkLink link,next; 236 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 237 for (link=(*dm)->workin; link; link=next) { 238 next = link->next; 239 ierr = PetscFree(link->mem);CHKERRQ(ierr); 240 ierr = PetscFree(link);CHKERRQ(ierr); 241 } 242 (*dm)->workin = PETSC_NULL; 243 } 244 245 if ((*dm)->ctx && (*dm)->ctxdestroy) { 246 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 247 } 248 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 249 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 250 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 251 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 252 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 253 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 254 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 255 256 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 257 ierr = 
PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 258 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 259 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 260 261 for (f = 0; f < (*dm)->numFields; ++f) { 262 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 263 } 264 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 265 /* if memory was published with AMS then destroy it */ 266 ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 267 268 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 269 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 270 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 271 PetscFunctionReturn(0); 272 } 273 274 #undef __FUNCT__ 275 #define __FUNCT__ "DMSetUp" 276 /*@ 277 DMSetUp - sets up the data structures inside a DM object 278 279 Collective on DM 280 281 Input Parameter: 282 . dm - the DM object to setup 283 284 Level: developer 285 286 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 287 288 @*/ 289 PetscErrorCode DMSetUp(DM dm) 290 { 291 PetscErrorCode ierr; 292 293 PetscFunctionBegin; 294 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 295 if (dm->setupcalled) PetscFunctionReturn(0); 296 if (dm->ops->setup) { 297 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 298 } 299 dm->setupcalled = PETSC_TRUE; 300 PetscFunctionReturn(0); 301 } 302 303 #undef __FUNCT__ 304 #define __FUNCT__ "DMSetFromOptions" 305 /*@ 306 DMSetFromOptions - sets parameters in a DM from the options database 307 308 Collective on DM 309 310 Input Parameter: 311 . dm - the DM object to set options for 312 313 Options Database: 314 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 315 . -dm_vec_type <type> type of vector to create inside DM 316 . 
-dm_mat_type <type> type of matrix to create inside DM 317 - -dm_coloring_type <global or ghosted> 318 319 Level: developer 320 321 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 322 323 @*/ 324 PetscErrorCode DMSetFromOptions(DM dm) 325 { 326 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 327 PetscErrorCode ierr; 328 char typeName[256] = MATAIJ; 329 330 PetscFunctionBegin; 331 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 332 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 333 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 334 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 335 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 336 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 337 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 338 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 339 if (flg) { 340 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 341 } 342 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 343 if (flg) { 344 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 345 } 346 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 347 if 
(dm->ops->setfromoptions) { 348 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 349 } 350 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 351 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 352 ierr = PetscOptionsEnd();CHKERRQ(ierr); 353 if (flg1) { 354 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 355 } 356 if (flg2) { 357 PetscViewer viewer; 358 359 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 360 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 361 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 362 ierr = DMView(dm, viewer);CHKERRQ(ierr); 363 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 364 } 365 if (flg3) { 366 PetscViewer viewer; 367 368 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 369 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 370 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 371 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 372 ierr = DMView(dm, viewer);CHKERRQ(ierr); 373 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 374 } 375 if (flg4) { 376 PetscViewer viewer; 377 378 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 379 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 380 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 381 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 382 ierr = DMView(dm, viewer);CHKERRQ(ierr); 383 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 384 } 385 PetscFunctionReturn(0); 386 } 387 388 #undef __FUNCT__ 389 #define __FUNCT__ "DMView" 390 /*@C 391 DMView - Views a vector packer or DMDA. 
392 393 Collective on DM 394 395 Input Parameter: 396 + dm - the DM object to view 397 - v - the viewer 398 399 Level: developer 400 401 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 402 403 @*/ 404 PetscErrorCode DMView(DM dm,PetscViewer v) 405 { 406 PetscErrorCode ierr; 407 408 PetscFunctionBegin; 409 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 410 if (!v) { 411 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 412 } 413 if (dm->ops->view) { 414 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 415 } 416 PetscFunctionReturn(0); 417 } 418 419 #undef __FUNCT__ 420 #define __FUNCT__ "DMCreateGlobalVector" 421 /*@ 422 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 423 424 Collective on DM 425 426 Input Parameter: 427 . dm - the DM object 428 429 Output Parameter: 430 . vec - the global vector 431 432 Level: beginner 433 434 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 435 436 @*/ 437 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 438 { 439 PetscErrorCode ierr; 440 441 PetscFunctionBegin; 442 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 443 if (dm->defaultSection) { 444 PetscSection gSection; 445 PetscInt localSize; 446 447 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 448 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 449 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 450 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 451 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 452 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 453 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 454 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 455 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 456 /* ierr = 
VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 457 } else { 458 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 459 } 460 PetscFunctionReturn(0); 461 } 462 463 #undef __FUNCT__ 464 #define __FUNCT__ "DMCreateLocalVector" 465 /*@ 466 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 467 468 Not Collective 469 470 Input Parameter: 471 . dm - the DM object 472 473 Output Parameter: 474 . vec - the local vector 475 476 Level: beginner 477 478 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 479 480 @*/ 481 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 482 { 483 PetscErrorCode ierr; 484 485 PetscFunctionBegin; 486 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 487 if (dm->defaultSection) { 488 PetscInt localSize; 489 490 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 491 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 492 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 493 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 494 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 495 } else { 496 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 497 } 498 PetscFunctionReturn(0); 499 } 500 501 #undef __FUNCT__ 502 #define __FUNCT__ "DMGetLocalToGlobalMapping" 503 /*@ 504 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 505 506 Collective on DM 507 508 Input Parameter: 509 . dm - the DM that provides the mapping 510 511 Output Parameter: 512 . ltog - the mapping 513 514 Level: intermediate 515 516 Notes: 517 This mapping can then be used by VecSetLocalToGlobalMapping() or 518 MatSetLocalToGlobalMapping(). 
519 520 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 521 @*/ 522 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 523 { 524 PetscErrorCode ierr; 525 526 PetscFunctionBegin; 527 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 528 PetscValidPointer(ltog,2); 529 if (!dm->ltogmap) { 530 PetscSection section, sectionGlobal; 531 532 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 533 if (section) { 534 PetscInt *ltog; 535 PetscInt pStart, pEnd, size, p, l; 536 537 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 538 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 539 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 540 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 541 for (p = pStart, l = 0; p < pEnd; ++p) { 542 PetscInt dof, off, c; 543 544 /* Should probably use constrained dofs */ 545 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 546 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 547 for (c = 0; c < dof; ++c, ++l) { 548 ltog[l] = off+c; 549 } 550 } 551 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 552 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 553 } else { 554 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 555 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 556 } 557 } 558 *ltog = dm->ltogmap; 559 PetscFunctionReturn(0); 560 } 561 562 #undef __FUNCT__ 563 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 564 /*@ 565 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 566 567 Collective on DM 568 569 Input Parameter: 570 . da - the distributed array that provides the mapping 571 572 Output Parameter: 573 . 
ltog - the block mapping 574 575 Level: intermediate 576 577 Notes: 578 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 579 MatSetLocalToGlobalMappingBlock(). 580 581 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 582 @*/ 583 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 584 { 585 PetscErrorCode ierr; 586 587 PetscFunctionBegin; 588 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 589 PetscValidPointer(ltog,2); 590 if (!dm->ltogmapb) { 591 PetscInt bs; 592 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 593 if (bs > 1) { 594 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 595 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 596 } else { 597 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 598 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 599 } 600 } 601 *ltog = dm->ltogmapb; 602 PetscFunctionReturn(0); 603 } 604 605 #undef __FUNCT__ 606 #define __FUNCT__ "DMGetBlockSize" 607 /*@ 608 DMGetBlockSize - Gets the inherent block size associated with a DM 609 610 Not Collective 611 612 Input Parameter: 613 . dm - the DM with block structure 614 615 Output Parameter: 616 . 
bs - the block size, 1 implies no exploitable block structure 617 618 Level: intermediate 619 620 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 621 @*/ 622 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 623 { 624 PetscFunctionBegin; 625 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 626 PetscValidPointer(bs,2); 627 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 628 *bs = dm->bs; 629 PetscFunctionReturn(0); 630 } 631 632 #undef __FUNCT__ 633 #define __FUNCT__ "DMCreateInterpolation" 634 /*@ 635 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 636 637 Collective on DM 638 639 Input Parameter: 640 + dm1 - the DM object 641 - dm2 - the second, finer DM object 642 643 Output Parameter: 644 + mat - the interpolation 645 - vec - the scaling (optional) 646 647 Level: developer 648 649 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 650 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 651 652 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMDAGetCoordinateDA()) to interpolate the mesh coordinate vectors 653 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
654 655 656 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 657 658 @*/ 659 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 660 { 661 PetscErrorCode ierr; 662 663 PetscFunctionBegin; 664 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 665 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 666 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 667 PetscFunctionReturn(0); 668 } 669 670 #undef __FUNCT__ 671 #define __FUNCT__ "DMCreateInjection" 672 /*@ 673 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 674 675 Collective on DM 676 677 Input Parameter: 678 + dm1 - the DM object 679 - dm2 - the second, finer DM object 680 681 Output Parameter: 682 . ctx - the injection 683 684 Level: developer 685 686 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 687 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 688 689 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 690 691 @*/ 692 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 693 { 694 PetscErrorCode ierr; 695 696 PetscFunctionBegin; 697 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 698 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 699 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 700 PetscFunctionReturn(0); 701 } 702 703 #undef __FUNCT__ 704 #define __FUNCT__ "DMCreateColoring" 705 /*@C 706 DMCreateColoring - Gets coloring for a DMDA or DMComposite 707 708 Collective on DM 709 710 Input Parameter: 711 + dm - the DM object 712 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 713 - matype - either MATAIJ or MATBAIJ 714 715 Output Parameter: 716 . 
coloring - the coloring 717 718 Level: developer 719 720 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 721 722 @*/ 723 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 724 { 725 PetscErrorCode ierr; 726 727 PetscFunctionBegin; 728 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 729 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 730 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 731 PetscFunctionReturn(0); 732 } 733 734 #undef __FUNCT__ 735 #define __FUNCT__ "DMCreateMatrix" 736 /*@C 737 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 738 739 Collective on DM 740 741 Input Parameter: 742 + dm - the DM object 743 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 744 any type which inherits from one of these (such as MATAIJ) 745 746 Output Parameter: 747 . mat - the empty Jacobian 748 749 Level: beginner 750 751 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 752 do not need to do it yourself. 753 754 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 755 the nonzero pattern call DMDASetMatPreallocateOnly() 756 757 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 758 internally by PETSc. 759 760 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 761 the indices for the global numbering for DMDAs which is complicated. 
762 763 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 764 765 @*/ 766 PetscErrorCode DMCreateMatrix(DM dm,const MatType mtype,Mat *mat) 767 { 768 PetscErrorCode ierr; 769 770 PetscFunctionBegin; 771 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 772 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 773 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 774 #endif 775 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 776 PetscValidPointer(mat,3); 777 if (dm->mattype) { 778 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 779 } else { 780 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 781 } 782 PetscFunctionReturn(0); 783 } 784 785 #undef __FUNCT__ 786 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 787 /*@ 788 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 789 preallocated but the nonzero structure and zero values will not be set. 790 791 Logically Collective on DMDA 792 793 Input Parameter: 794 + dm - the DM 795 - only - PETSC_TRUE if only want preallocation 796 797 Level: developer 798 .seealso DMCreateMatrix() 799 @*/ 800 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 801 { 802 PetscFunctionBegin; 803 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 804 dm->prealloc_only = only; 805 PetscFunctionReturn(0); 806 } 807 808 #undef __FUNCT__ 809 #define __FUNCT__ "DMGetWorkArray" 810 /*@C 811 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 812 813 Not Collective 814 815 Input Parameters: 816 + dm - the DM object 817 . count - The minium size 818 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 819 820 Output Parameter: 821 . 
array - the work array 822 823 Level: developer 824 825 .seealso DMDestroy(), DMCreate() 826 @*/ 827 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 828 { 829 PetscErrorCode ierr; 830 DMWorkLink link; 831 size_t size; 832 833 PetscFunctionBegin; 834 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 835 PetscValidPointer(mem,4); 836 if (dm->workin) { 837 link = dm->workin; 838 dm->workin = dm->workin->next; 839 } else { 840 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 841 } 842 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 843 if (size*count > link->bytes) { 844 ierr = PetscFree(link->mem);CHKERRQ(ierr); 845 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 846 link->bytes = size*count; 847 } 848 link->next = dm->workout; 849 dm->workout = link; 850 *(void**)mem = link->mem; 851 PetscFunctionReturn(0); 852 } 853 854 #undef __FUNCT__ 855 #define __FUNCT__ "DMRestoreWorkArray" 856 /*@C 857 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 858 859 Not Collective 860 861 Input Parameters: 862 + dm - the DM object 863 . count - The minium size 864 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 865 866 Output Parameter: 867 . 
array - the work array 868 869 Level: developer 870 871 .seealso DMDestroy(), DMCreate() 872 @*/ 873 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 874 { 875 DMWorkLink *p,link; 876 877 PetscFunctionBegin; 878 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 879 PetscValidPointer(mem,4); 880 for (p=&dm->workout; (link=*p); p=&link->next) { 881 if (link->mem == *(void**)mem) { 882 *p = link->next; 883 link->next = dm->workin; 884 dm->workin = link; 885 *(void**)mem = PETSC_NULL; 886 PetscFunctionReturn(0); 887 } 888 } 889 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 890 PetscFunctionReturn(0); 891 } 892 893 #undef __FUNCT__ 894 #define __FUNCT__ "DMSetNullSpaceConstructor" 895 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 896 { 897 PetscFunctionBegin; 898 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 899 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 900 dm->nullspaceConstructors[field] = nullsp; 901 PetscFunctionReturn(0); 902 } 903 904 #undef __FUNCT__ 905 #define __FUNCT__ "DMCreateFieldIS" 906 /*@C 907 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 908 909 Not collective 910 911 Input Parameter: 912 . dm - the DM object 913 914 Output Parameters: 915 + numFields - The number of fields (or PETSC_NULL if not requested) 916 . fieldNames - The name for each field (or PETSC_NULL if not requested) 917 - fields - The global indices for each field (or PETSC_NULL if not requested) 918 919 Level: intermediate 920 921 Notes: 922 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 923 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 924 PetscFree(). 
925 926 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 927 @*/ 928 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 929 { 930 PetscSection section, sectionGlobal; 931 PetscErrorCode ierr; 932 933 PetscFunctionBegin; 934 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 935 if (numFields) { 936 PetscValidPointer(numFields,2); 937 *numFields = 0; 938 } 939 if (fieldNames) { 940 PetscValidPointer(fieldNames,3); 941 *fieldNames = PETSC_NULL; 942 } 943 if (fields) { 944 PetscValidPointer(fields,4); 945 *fields = PETSC_NULL; 946 } 947 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 948 if (section) { 949 PetscInt *fieldSizes, **fieldIndices; 950 PetscInt nF, f, pStart, pEnd, p; 951 952 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 953 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 954 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 955 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 956 for (f = 0; f < nF; ++f) { 957 fieldSizes[f] = 0; 958 } 959 for (p = pStart; p < pEnd; ++p) { 960 PetscInt gdof; 961 962 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 963 if (gdof > 0) { 964 for (f = 0; f < nF; ++f) { 965 PetscInt fdof, fcdof; 966 967 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 968 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 969 fieldSizes[f] += fdof-fcdof; 970 } 971 } 972 } 973 for (f = 0; f < nF; ++f) { 974 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 975 fieldSizes[f] = 0; 976 } 977 for (p = pStart; p < pEnd; ++p) { 978 PetscInt gdof, goff; 979 980 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 981 if (gdof > 0) { 982 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 983 for (f = 0; f < nF; ++f) { 984 PetscInt fdof, fcdof, fc; 985 
986 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 987 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 988 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 989 fieldIndices[f][fieldSizes[f]] = goff++; 990 } 991 } 992 } 993 } 994 if (numFields) {*numFields = nF;} 995 if (fieldNames) { 996 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 997 for (f = 0; f < nF; ++f) { 998 const char *fieldName; 999 1000 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1001 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 1002 } 1003 } 1004 if (fields) { 1005 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1006 for (f = 0; f < nF; ++f) { 1007 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1008 } 1009 } 1010 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1011 } else { 1012 if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1013 } 1014 PetscFunctionReturn(0); 1015 } 1016 1017 #undef __FUNCT__ 1018 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1019 /*@C 1020 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1021 1022 Not Collective 1023 1024 Input Parameters: 1025 + dm - the DM object 1026 - name - the name of the field decomposition 1027 1028 Output Parameter: 1029 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1030 1031 Level: advanced 1032 1033 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1034 @*/ 1035 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1036 { 1037 PetscErrorCode ierr; 1038 1039 PetscFunctionBegin; 1040 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1041 PetscValidCharPointer(name,2); 1042 PetscValidPointer(ddm,3); 1043 *ddm = PETSC_NULL; 1044 if (dm->ops->createfielddecompositiondm) { 1045 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1046 } 1047 PetscFunctionReturn(0); 1048 } 1049 1050 #undef __FUNCT__ 1051 #define __FUNCT__ "DMCreateFieldDecomposition" 1052 /*@C 1053 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1054 corresponding to different fields: each IS contains the global indices of the dofs of the 1055 corresponding field. The optional list of DMs define the DM for each subproblem. 1056 Generalizes DMCreateFieldIS(). 1057 1058 Not collective 1059 1060 Input Parameter: 1061 . dm - the DM object 1062 1063 Output Parameters: 1064 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1065 . namelist - The name for each field (or PETSC_NULL if not requested) 1066 . islist - The global indices for each field (or PETSC_NULL if not requested) 1067 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1068 1069 Level: intermediate 1070 1071 Notes: 1072 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1073 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1074 and all of the arrays should be freed with PetscFree(). 
1075 1076 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1077 @*/ 1078 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1079 { 1080 PetscErrorCode ierr; 1081 1082 PetscFunctionBegin; 1083 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1084 if (len) {PetscValidPointer(len,2); *len = 0;} 1085 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1086 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1087 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1088 if (!dm->ops->createfielddecomposition) { 1089 PetscSection section; 1090 PetscInt numFields, f; 1091 1092 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1093 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1094 if (section && numFields && dm->ops->createsubdm) { 1095 *len = numFields; 1096 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1097 for (f = 0; f < numFields; ++f) { 1098 const char *fieldName; 1099 1100 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1101 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1102 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1103 } 1104 } else { 1105 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1106 /* By default there are no DMs associated with subproblems. */ 1107 if (dmlist) *dmlist = PETSC_NULL; 1108 } 1109 } 1110 else { 1111 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1112 } 1113 PetscFunctionReturn(0); 1114 } 1115 1116 #undef __FUNCT__ 1117 #define __FUNCT__ "DMCreateSubDM" 1118 /*@C 1119 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1120 The fields are defined by DMCreateFieldIS(). 
1121 1122 Not collective 1123 1124 Input Parameters: 1125 + dm - the DM object 1126 . numFields - number of fields in this subproblem 1127 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1128 1129 Output Parameters: 1130 . is - The global indices for the subproblem 1131 - dm - The DM for the subproblem 1132 1133 Level: intermediate 1134 1135 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1136 @*/ 1137 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1138 { 1139 PetscErrorCode ierr; 1140 1141 PetscFunctionBegin; 1142 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1143 PetscValidPointer(fields,3); 1144 if (is) {PetscValidPointer(is,4);} 1145 if (subdm) {PetscValidPointer(subdm,5);} 1146 if (dm->ops->createsubdm) { 1147 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1148 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1149 PetscFunctionReturn(0); 1150 } 1151 1152 #undef __FUNCT__ 1153 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1154 /*@C 1155 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1156 1157 Not Collective 1158 1159 Input Parameters: 1160 + dm - the DM object 1161 - name - the name of the subdomain decomposition 1162 1163 Output Parameter: 1164 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1165 1166 Level: advanced 1167 1168 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1169 @*/ 1170 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1171 { 1172 PetscErrorCode ierr; 1173 1174 PetscFunctionBegin; 1175 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1176 PetscValidCharPointer(name,2); 1177 PetscValidPointer(ddm,3); 1178 *ddm = PETSC_NULL; 1179 if (dm->ops->createdomaindecompositiondm) { 1180 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1181 } 1182 PetscFunctionReturn(0); 1183 } 1184 1185 #undef __FUNCT__ 1186 #define __FUNCT__ "DMCreateDomainDecomposition" 1187 /*@C 1188 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1189 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1190 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1191 define a nonoverlapping covering, while outer subdomains can overlap. 1192 The optional list of DMs define the DM for each subproblem. 1193 1194 Not collective 1195 1196 Input Parameter: 1197 . dm - the DM object 1198 1199 Output Parameters: 1200 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1201 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1202 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1203 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1204 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1205 1206 Level: intermediate 1207 1208 Notes: 1209 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1210 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1211 and all of the arrays should be freed with PetscFree(). 1212 1213 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1214 @*/ 1215 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1216 { 1217 PetscErrorCode ierr; 1218 1219 PetscFunctionBegin; 1220 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1221 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1222 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1223 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1224 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1225 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1226 if (dm->ops->createdomaindecomposition) { 1227 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1228 } 1229 PetscFunctionReturn(0); 1230 } 1231 1232 #undef __FUNCT__ 1233 #define __FUNCT__ "DMRefine" 1234 /*@ 1235 DMRefine - Refines a DM object 1236 1237 Collective on DM 1238 1239 Input Parameter: 1240 + dm - the DM object 1241 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1242 1243 Output Parameter: 1244 . 
dmf - the refined DM, or PETSC_NULL 1245 1246 Note: If no refinement was done, the return value is PETSC_NULL 1247 1248 Level: developer 1249 1250 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1251 @*/ 1252 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1253 { 1254 PetscErrorCode ierr; 1255 DMRefineHookLink link; 1256 1257 PetscFunctionBegin; 1258 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1259 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1260 if (*dmf) { 1261 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1262 (*dmf)->ops->initialguess = dm->ops->initialguess; 1263 (*dmf)->ops->function = dm->ops->function; 1264 (*dmf)->ops->functionj = dm->ops->functionj; 1265 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1266 (*dmf)->ops->jacobian = dm->ops->jacobian; 1267 } 1268 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1269 (*dmf)->ctx = dm->ctx; 1270 (*dmf)->leveldown = dm->leveldown; 1271 (*dmf)->levelup = dm->levelup + 1; 1272 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1273 for (link=dm->refinehook; link; link=link->next) { 1274 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1275 } 1276 } 1277 PetscFunctionReturn(0); 1278 } 1279 1280 #undef __FUNCT__ 1281 #define __FUNCT__ "DMRefineHookAdd" 1282 /*@ 1283 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1284 1285 Logically Collective 1286 1287 Input Arguments: 1288 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1289 . refinehook - function to run when setting up a coarser level 1290 . 
interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link holding both callbacks and the user context, preserving registration order */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
   DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

   Collective if any
hooks are run

   Input Arguments:
+  coarse - coarser DM to use as a base
.  interp - interpolation matrix, apply using MatInterpolate()
-  fine - finer DM to update

   Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Run every interpolation hook registered on the fine DM, in registration order */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
   DMGetRefineLevel - Gets the number of refinements that have generated this DM.

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  level - number of refinements

   Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
   DMGlobalToLocalBegin - Begins updating local vectors from global vector

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  g - the global vector
.
mode - INSERT_VALUES or ADD_VALUES
-  l - the local vector


   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The star-forest path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored here while the matching PetscSFBcastEnd() in
       DMGlobalToLocalEnd() may still be using them -- confirm the pointers remain valid */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
   DMGlobalToLocalEnd - Ends updating local vectors from global vector

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  g - the global vector
.
mode - INSERT_VALUES or ADD_VALUES 1441 - l - the local vector 1442 1443 1444 Level: beginner 1445 1446 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1447 1448 @*/ 1449 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1450 { 1451 PetscSF sf; 1452 PetscErrorCode ierr; 1453 PetscScalar *lArray, *gArray; 1454 1455 PetscFunctionBegin; 1456 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1457 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1458 if (sf) { 1459 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1460 1461 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1462 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1463 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1464 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1465 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1466 } else { 1467 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1468 } 1469 PetscFunctionReturn(0); 1470 } 1471 1472 #undef __FUNCT__ 1473 #define __FUNCT__ "DMLocalToGlobalBegin" 1474 /*@ 1475 DMLocalToGlobalBegin - updates global vectors from local vectors 1476 1477 Neighbor-wise Collective on DM 1478 1479 Input Parameters: 1480 + dm - the DM object 1481 . l - the local vector 1482 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1483 base point. 1484 - - the global vector 1485 1486 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local
   array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
   global array to the final global array with VecAXPY().

   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op op;
    PetscScalar *lArray, *gArray;

    /* Translate the insertion mode into the MPI reduction operation used by the star forest */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* SETERRQ returns from the function, so there is no fallthrough here */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before PetscSFReduceEnd() runs -- confirm the SF retains valid pointers */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the implementation */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
   DMLocalToGlobalEnd - updates global vectors from local vectors

   Neighbor-wise Collective on DM

   Input Parameters:
+  dm - the DM object
.  l - the local vector
.  mode - INSERT_VALUES or ADD_VALUES
-  g - the global vector


   Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op op;
    PetscScalar *lArray, *gArray;

    /* Must resolve the same reduction operation chosen in DMLocalToGlobalBegin() */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian by finite-difference coloring when no Jacobian routine is provided

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - matrix structure flag; always set to SAME_NONZERO_PATTERN

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* Fill B by applying the finite-difference coloring stored on the DM */
  ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  /* When A differs from B (e.g. matrix-free operator), it still must be assembled */
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
    DMCoarsen - Coarsens a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.
dmc - the coarsened DM 1635 1636 Level: developer 1637 1638 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1639 1640 @*/ 1641 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1642 { 1643 PetscErrorCode ierr; 1644 DMCoarsenHookLink link; 1645 1646 PetscFunctionBegin; 1647 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1648 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1649 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1650 (*dmc)->ops->initialguess = dm->ops->initialguess; 1651 (*dmc)->ops->function = dm->ops->function; 1652 (*dmc)->ops->functionj = dm->ops->functionj; 1653 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1654 (*dmc)->ops->jacobian = dm->ops->jacobian; 1655 } 1656 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1657 (*dmc)->ctx = dm->ctx; 1658 (*dmc)->levelup = dm->levelup; 1659 (*dmc)->leveldown = dm->leveldown + 1; 1660 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1661 for (link=dm->coarsenhook; link; link=link->next) { 1662 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1663 } 1664 PetscFunctionReturn(0); 1665 } 1666 1667 #undef __FUNCT__ 1668 #define __FUNCT__ "DMCoarsenHookAdd" 1669 /*@ 1670 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1671 1672 Logically Collective 1673 1674 Input Arguments: 1675 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1676 . coarsenhook - function to run when setting up a coarser level 1677 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1678 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1679 1680 Calling sequence of coarsenhook: 1681 $ coarsenhook(DM fine,DM coarse,void *ctx); 1682 1683 + fine - fine level DM 1684 . 
coarse - coarse level DM to restrict problem to
-  ctx - optional user-defined function context

   Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+  fine - fine level DM
.  mrestrict - matrix restricting a fine-level solution to the coarse grid
.  rscale - scaling vector for restriction
.  inject - matrix restricting by injection
.  coarse - coarse level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link holding both callbacks and the user context, preserving registration order */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook = coarsenhook;
  link->restricthook = restricthook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective
if any hooks are run

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run every restriction hook registered on the fine DM, in registration order */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrictHookAdd"
/*@
   DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  global - global DM
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for restricthook:
$    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)

+  global - global DM
.  out - scatter to the outer (with ghost and overlap points) block vector
.  in - scatter to block vector values only owned locally
.  block - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMBlockRestrictHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link holding the callback and the user context, preserving registration order */
  ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrict"
/*@
   DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()

   Collective if any hooks are run

   Input Arguments:
+  global - global DM to use as a base
.  in - scatter passed to each hook as its second argument
.  out - scatter passed to each hook as its third argument
-  block - block DM to update

   Note:
   NOTE(review): DMBlockRestrictHookAdd() documents the hook's scatter arguments in (out,in) order,
   while this routine passes (in,out) -- confirm which order implementations expect.

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
{
  PetscErrorCode ierr;
  DMBlockRestrictHookLink link;

  PetscFunctionBegin;
  /* Run every block-restriction hook registered on the global DM, in registration order */
  for (link=global->blockrestricthook; link; link=link->next) {
    if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoarsenLevel"
/*@
   DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  level - number of coarsenings

   Level: developer

.seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->leveldown;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "DMRefineHierarchy"
/*@C
   DMRefineHierarchy - Refines a DM object, all levels at once

   Collective on DM

   Input Parameter:
+  dm - the DM object
-  nlevels - the number of levels of refinement

   Output Parameter:
.
dmf - the refined DM hierarchy 1874 1875 Level: developer 1876 1877 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1878 1879 @*/ 1880 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1881 { 1882 PetscErrorCode ierr; 1883 1884 PetscFunctionBegin; 1885 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1886 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1887 if (nlevels == 0) PetscFunctionReturn(0); 1888 if (dm->ops->refinehierarchy) { 1889 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1890 } else if (dm->ops->refine) { 1891 PetscInt i; 1892 1893 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1894 for (i=1; i<nlevels; i++) { 1895 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1896 } 1897 } else { 1898 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1899 } 1900 PetscFunctionReturn(0); 1901 } 1902 1903 #undef __FUNCT__ 1904 #define __FUNCT__ "DMCoarsenHierarchy" 1905 /*@C 1906 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1907 1908 Collective on DM 1909 1910 Input Parameter: 1911 + dm - the DM object 1912 - nlevels - the number of levels of coarsening 1913 1914 Output Parameter: 1915 . 
dmc - the coarsened DM hierarchy 1916 1917 Level: developer 1918 1919 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1920 1921 @*/ 1922 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 1923 { 1924 PetscErrorCode ierr; 1925 1926 PetscFunctionBegin; 1927 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1928 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1929 if (nlevels == 0) PetscFunctionReturn(0); 1930 PetscValidPointer(dmc,3); 1931 if (dm->ops->coarsenhierarchy) { 1932 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 1933 } else if (dm->ops->coarsen) { 1934 PetscInt i; 1935 1936 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 1937 for (i=1; i<nlevels; i++) { 1938 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 1939 } 1940 } else { 1941 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 1942 } 1943 PetscFunctionReturn(0); 1944 } 1945 1946 #undef __FUNCT__ 1947 #define __FUNCT__ "DMCreateAggregates" 1948 /*@ 1949 DMCreateAggregates - Gets the aggregates that map between 1950 grids associated with two DMs. 1951 1952 Collective on DM 1953 1954 Input Parameters: 1955 + dmc - the coarse grid DM 1956 - dmf - the fine grid DM 1957 1958 Output Parameters: 1959 . 
rest - the restriction matrix (transpose of the projection matrix) 1960 1961 Level: intermediate 1962 1963 .keywords: interpolation, restriction, multigrid 1964 1965 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 1966 @*/ 1967 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 1968 { 1969 PetscErrorCode ierr; 1970 1971 PetscFunctionBegin; 1972 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 1973 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 1974 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 1975 PetscFunctionReturn(0); 1976 } 1977 1978 #undef __FUNCT__ 1979 #define __FUNCT__ "DMSetApplicationContextDestroy" 1980 /*@C 1981 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 1982 1983 Not Collective 1984 1985 Input Parameters: 1986 + dm - the DM object 1987 - destroy - the destroy function 1988 1989 Level: intermediate 1990 1991 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1992 1993 @*/ 1994 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 1995 { 1996 PetscFunctionBegin; 1997 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1998 dm->ctxdestroy = destroy; 1999 PetscFunctionReturn(0); 2000 } 2001 2002 #undef __FUNCT__ 2003 #define __FUNCT__ "DMSetApplicationContext" 2004 /*@ 2005 DMSetApplicationContext - Set a user context into a DM object 2006 2007 Not Collective 2008 2009 Input Parameters: 2010 + dm - the DM object 2011 - ctx - the user context 2012 2013 Level: intermediate 2014 2015 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2016 2017 @*/ 2018 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2019 { 2020 PetscFunctionBegin; 2021 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2022 dm->ctx = ctx; 2023 PetscFunctionReturn(0); 2024 } 2025 
2026 #undef __FUNCT__ 2027 #define __FUNCT__ "DMGetApplicationContext" 2028 /*@ 2029 DMGetApplicationContext - Gets a user context from a DM object 2030 2031 Not Collective 2032 2033 Input Parameter: 2034 . dm - the DM object 2035 2036 Output Parameter: 2037 . ctx - the user context 2038 2039 Level: intermediate 2040 2041 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2042 2043 @*/ 2044 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2045 { 2046 PetscFunctionBegin; 2047 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2048 *(void**)ctx = dm->ctx; 2049 PetscFunctionReturn(0); 2050 } 2051 2052 #undef __FUNCT__ 2053 #define __FUNCT__ "DMSetInitialGuess" 2054 /*@C 2055 DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers 2056 2057 Logically Collective on DM 2058 2059 Input Parameter: 2060 + dm - the DM object to destroy 2061 - f - the function to compute the initial guess 2062 2063 Level: intermediate 2064 2065 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2066 2067 @*/ 2068 PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec)) 2069 { 2070 PetscFunctionBegin; 2071 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2072 dm->ops->initialguess = f; 2073 PetscFunctionReturn(0); 2074 } 2075 2076 #undef __FUNCT__ 2077 #define __FUNCT__ "DMSetFunction" 2078 /*@C 2079 DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES 2080 2081 Logically Collective on DM 2082 2083 Input Parameter: 2084 + dm - the DM object 2085 - f - the function to compute (use PETSC_NULL to cancel a previous function that was set) 2086 2087 Level: intermediate 2088 2089 Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite 
differences if no Jacobian 2090 computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian. 2091 2092 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2093 DMSetJacobian() 2094 2095 @*/ 2096 PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2097 { 2098 PetscFunctionBegin; 2099 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2100 dm->ops->function = f; 2101 if (f) { 2102 dm->ops->functionj = f; 2103 } 2104 PetscFunctionReturn(0); 2105 } 2106 2107 #undef __FUNCT__ 2108 #define __FUNCT__ "DMSetJacobian" 2109 /*@C 2110 DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES 2111 2112 Logically Collective on DM 2113 2114 Input Parameter: 2115 + dm - the DM object to destroy 2116 - f - the function to compute the matrix entries 2117 2118 Level: intermediate 2119 2120 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2121 DMSetFunction() 2122 2123 @*/ 2124 PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*)) 2125 { 2126 PetscFunctionBegin; 2127 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2128 dm->ops->jacobian = f; 2129 PetscFunctionReturn(0); 2130 } 2131 2132 #undef __FUNCT__ 2133 #define __FUNCT__ "DMSetVariableBounds" 2134 /*@C 2135 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 
2136 2137 Logically Collective on DM 2138 2139 Input Parameter: 2140 + dm - the DM object 2141 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2142 2143 Level: intermediate 2144 2145 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2146 DMSetJacobian() 2147 2148 @*/ 2149 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2150 { 2151 PetscFunctionBegin; 2152 dm->ops->computevariablebounds = f; 2153 PetscFunctionReturn(0); 2154 } 2155 2156 #undef __FUNCT__ 2157 #define __FUNCT__ "DMHasVariableBounds" 2158 /*@ 2159 DMHasVariableBounds - does the DM object have a variable bounds function? 2160 2161 Not Collective 2162 2163 Input Parameter: 2164 . dm - the DM object to destroy 2165 2166 Output Parameter: 2167 . flg - PETSC_TRUE if the variable bounds function exists 2168 2169 Level: developer 2170 2171 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2172 2173 @*/ 2174 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2175 { 2176 PetscFunctionBegin; 2177 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2178 PetscFunctionReturn(0); 2179 } 2180 2181 #undef __FUNCT__ 2182 #define __FUNCT__ "DMComputeVariableBounds" 2183 /*@C 2184 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2185 2186 Logically Collective on DM 2187 2188 Input Parameters: 2189 + dm - the DM object to destroy 2190 - x - current solution at which the bounds are computed 2191 2192 Output parameters: 2193 + xl - lower bound 2194 - xu - upper bound 2195 2196 Level: intermediate 2197 2198 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2199 DMSetFunction(), DMSetVariableBounds() 2200 2201 @*/ 2202 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2203 { 2204 PetscErrorCode ierr; 2205 PetscFunctionBegin; 2206 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2207 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2208 if (dm->ops->computevariablebounds) { 2209 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2210 } 2211 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2212 PetscFunctionReturn(0); 2213 } 2214 2215 #undef __FUNCT__ 2216 #define __FUNCT__ "DMComputeInitialGuess" 2217 /*@ 2218 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2219 2220 Collective on DM 2221 2222 Input Parameter: 2223 + dm - the DM object to destroy 2224 - x - the vector to hold the initial guess values 2225 2226 Level: developer 2227 2228 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2229 2230 @*/ 2231 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2232 { 2233 PetscErrorCode ierr; 2234 PetscFunctionBegin; 2235 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2236 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2237 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2238 PetscFunctionReturn(0); 2239 } 2240 2241 #undef __FUNCT__ 2242 #define __FUNCT__ "DMHasInitialGuess" 2243 /*@ 2244 
DMHasInitialGuess - does the DM object have an initial guess function 2245 2246 Not Collective 2247 2248 Input Parameter: 2249 . dm - the DM object to destroy 2250 2251 Output Parameter: 2252 . flg - PETSC_TRUE if function exists 2253 2254 Level: developer 2255 2256 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2257 2258 @*/ 2259 PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg) 2260 { 2261 PetscFunctionBegin; 2262 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2263 *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE; 2264 PetscFunctionReturn(0); 2265 } 2266 2267 #undef __FUNCT__ 2268 #define __FUNCT__ "DMHasFunction" 2269 /*@ 2270 DMHasFunction - does the DM object have a function 2271 2272 Not Collective 2273 2274 Input Parameter: 2275 . dm - the DM object to destroy 2276 2277 Output Parameter: 2278 . flg - PETSC_TRUE if function exists 2279 2280 Level: developer 2281 2282 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2283 2284 @*/ 2285 PetscErrorCode DMHasFunction(DM dm,PetscBool *flg) 2286 { 2287 PetscFunctionBegin; 2288 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2289 *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE; 2290 PetscFunctionReturn(0); 2291 } 2292 2293 #undef __FUNCT__ 2294 #define __FUNCT__ "DMHasJacobian" 2295 /*@ 2296 DMHasJacobian - does the DM object have a matrix function 2297 2298 Not Collective 2299 2300 Input Parameter: 2301 . dm - the DM object to destroy 2302 2303 Output Parameter: 2304 . 
flg - PETSC_TRUE if function exists 2305 2306 Level: developer 2307 2308 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2309 2310 @*/ 2311 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2312 { 2313 PetscFunctionBegin; 2314 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2315 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2316 PetscFunctionReturn(0); 2317 } 2318 2319 #undef __FUNCT__ 2320 #define __FUNCT__ "DMHasColoring" 2321 /*@ 2322 DMHasColoring - does the DM object have a method of providing a coloring? 2323 2324 Not Collective 2325 2326 Input Parameter: 2327 . dm - the DM object 2328 2329 Output Parameter: 2330 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2331 2332 Level: developer 2333 2334 .seealso DMHasFunction(), DMCreateColoring() 2335 2336 @*/ 2337 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2338 { 2339 PetscFunctionBegin; 2340 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2341 PetscFunctionReturn(0); 2342 } 2343 2344 #undef __FUNCT__ 2345 #define __FUNCT__ "DMSetVec" 2346 /*@C 2347 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2348 2349 Collective on DM 2350 2351 Input Parameter: 2352 + dm - the DM object 2353 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2354 2355 Level: developer 2356 2357 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2358 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2359 2360 @*/ 2361 PetscErrorCode DMSetVec(DM dm,Vec x) 2362 { 2363 PetscErrorCode ierr; 2364 PetscFunctionBegin; 2365 if (x) { 2366 if (!dm->x) { 2367 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2368 } 2369 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2370 } 2371 else if (dm->x) { 2372 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2373 } 2374 PetscFunctionReturn(0); 2375 } 2376 2377 2378 #undef __FUNCT__ 2379 #define __FUNCT__ "DMComputeFunction" 2380 /*@ 2381 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2382 2383 Collective on DM 2384 2385 Input Parameter: 2386 + dm - the DM object to destroy 2387 . x - the location where the function is evaluationed, may be ignored for linear problems 2388 - b - the vector to hold the right hand side entries 2389 2390 Level: developer 2391 2392 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2393 DMSetJacobian() 2394 2395 @*/ 2396 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2397 { 2398 PetscErrorCode ierr; 2399 PetscFunctionBegin; 2400 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2401 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2402 PetscStackPush("DM user function"); 2403 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2404 PetscStackPop; 2405 PetscFunctionReturn(0); 2406 } 2407 2408 2409 2410 #undef __FUNCT__ 2411 #define __FUNCT__ "DMComputeJacobian" 2412 /*@ 2413 DMComputeJacobian - compute the matrix entries for the solver 2414 2415 Collective on DM 2416 2417 Input Parameter: 2418 + dm - the DM object 2419 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM 2420 . A - matrix that defines the operator for the linear solve 2421 - B - the matrix used to construct the preconditioner 2422 2423 Level: developer 2424 2425 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2426 DMSetFunction() 2427 2428 @*/ 2429 PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 2430 { 2431 PetscErrorCode ierr; 2432 2433 PetscFunctionBegin; 2434 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2435 if (!dm->ops->jacobian) { 2436 ISColoring coloring; 2437 MatFDColoring fd; 2438 const MatType mtype; 2439 2440 ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr); 2441 ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr); 2442 ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr); 2443 ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr); 2444 ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr); 2445 ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr); 2446 ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr); 2447 2448 dm->fd = fd; 2449 dm->ops->jacobian = DMComputeJacobianDefault; 2450 2451 /* don't know why this is needed */ 2452 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 2453 } 2454 if (!x) x = dm->x; 2455 ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr); 2456 2457 /* if matrix depends on x; i.e. 
nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */ 2458 if (x) { 2459 if (!dm->x) { 2460 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2461 } 2462 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2463 } 2464 if (A != B) { 2465 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2466 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2467 } 2468 PetscFunctionReturn(0); 2469 } 2470 2471 2472 PetscFList DMList = PETSC_NULL; 2473 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2474 2475 #undef __FUNCT__ 2476 #define __FUNCT__ "DMSetType" 2477 /*@C 2478 DMSetType - Builds a DM, for a particular DM implementation. 2479 2480 Collective on DM 2481 2482 Input Parameters: 2483 + dm - The DM object 2484 - method - The name of the DM type 2485 2486 Options Database Key: 2487 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2488 2489 Notes: 2490 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2491 2492 Level: intermediate 2493 2494 .keywords: DM, set, type 2495 .seealso: DMGetType(), DMCreate() 2496 @*/ 2497 PetscErrorCode DMSetType(DM dm, const DMType method) 2498 { 2499 PetscErrorCode (*r)(DM); 2500 PetscBool match; 2501 PetscErrorCode ierr; 2502 2503 PetscFunctionBegin; 2504 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2505 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2506 if (match) PetscFunctionReturn(0); 2507 2508 if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);} 2509 ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr); 2510 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2511 2512 if (dm->ops->destroy) { 2513 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2514 dm->ops->destroy = PETSC_NULL; 2515 } 2516 ierr = (*r)(dm);CHKERRQ(ierr); 2517 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2518 PetscFunctionReturn(0); 2519 } 2520 2521 #undef __FUNCT__ 2522 #define __FUNCT__ "DMGetType" 2523 /*@C 2524 DMGetType - Gets the DM type name (as a string) from the DM. 2525 2526 Not Collective 2527 2528 Input Parameter: 2529 . dm - The DM 2530 2531 Output Parameter: 2532 . type - The DM type name 2533 2534 Level: intermediate 2535 2536 .keywords: DM, get, type, name 2537 .seealso: DMSetType(), DMCreate() 2538 @*/ 2539 PetscErrorCode DMGetType(DM dm, const DMType *type) 2540 { 2541 PetscErrorCode ierr; 2542 2543 PetscFunctionBegin; 2544 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2545 PetscValidCharPointer(type,2); 2546 if (!DMRegisterAllCalled) { 2547 ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr); 2548 } 2549 *type = ((PetscObject)dm)->type_name; 2550 PetscFunctionReturn(0); 2551 } 2552 2553 #undef __FUNCT__ 2554 #define __FUNCT__ "DMConvert" 2555 /*@C 2556 DMConvert - Converts a DM to another DM, either of the same or different type. 
2557 2558 Collective on DM 2559 2560 Input Parameters: 2561 + dm - the DM 2562 - newtype - new DM type (use "same" for the same type) 2563 2564 Output Parameter: 2565 . M - pointer to new DM 2566 2567 Notes: 2568 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2569 the MPI communicator of the generated DM is always the same as the communicator 2570 of the input DM. 2571 2572 Level: intermediate 2573 2574 .seealso: DMCreate() 2575 @*/ 2576 PetscErrorCode DMConvert(DM dm, const DMType newtype, DM *M) 2577 { 2578 DM B; 2579 char convname[256]; 2580 PetscBool sametype, issame; 2581 PetscErrorCode ierr; 2582 2583 PetscFunctionBegin; 2584 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2585 PetscValidType(dm,1); 2586 PetscValidPointer(M,3); 2587 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2588 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2589 { 2590 PetscErrorCode (*conv)(DM, const DMType, DM *) = PETSC_NULL; 2591 2592 /* 2593 Order of precedence: 2594 1) See if a specialized converter is known to the current DM. 2595 2) See if a specialized converter is known to the desired DM class. 2596 3) See if a good general converter is registered for the desired class 2597 4) See if a good general converter is known for the current matrix. 2598 5) Use a really basic converter. 2599 */ 2600 2601 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2602 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2603 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2604 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2605 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2606 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2607 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2608 if (conv) goto foundconv; 2609 2610 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2611 ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr); 2612 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2613 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2614 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2615 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2616 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2617 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2618 ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2619 if (conv) { 2620 ierr = DMDestroy(&B);CHKERRQ(ierr); 2621 goto foundconv; 2622 } 2623 2624 #if 0 2625 /* 3) See if a good general converter is registered for the desired class */ 2626 conv = B->ops->convertfrom; 2627 ierr = DMDestroy(&B);CHKERRQ(ierr); 2628 if (conv) goto foundconv; 2629 2630 /* 4) See if a good general converter is known for the current matrix */ 2631 if (dm->ops->convert) { 2632 conv = dm->ops->convert; 2633 } 2634 if (conv) goto foundconv; 2635 #endif 2636 2637 /* 5) Use a really basic converter. 
*/ 2638 SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2639 2640 foundconv: 2641 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2642 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2643 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2644 } 2645 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2646 PetscFunctionReturn(0); 2647 } 2648 2649 /*--------------------------------------------------------------------------------------------------------------------*/ 2650 2651 #undef __FUNCT__ 2652 #define __FUNCT__ "DMRegister" 2653 /*@C 2654 DMRegister - See DMRegisterDynamic() 2655 2656 Level: advanced 2657 @*/ 2658 PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM)) 2659 { 2660 char fullname[PETSC_MAX_PATH_LEN]; 2661 PetscErrorCode ierr; 2662 2663 PetscFunctionBegin; 2664 ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr); 2665 ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr); 2666 ierr = PetscStrcat(fullname, name);CHKERRQ(ierr); 2667 ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr); 2668 PetscFunctionReturn(0); 2669 } 2670 2671 2672 /*--------------------------------------------------------------------------------------------------------------------*/ 2673 #undef __FUNCT__ 2674 #define __FUNCT__ "DMRegisterDestroy" 2675 /*@C 2676 DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic(). 
   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* Context shared by the MATLAB callbacks: names of the MATLAB function/Jacobian routines plus an optional MATLAB-side context */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
   DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode ierr;
  DMMatlabContext *sctx;
  int nlhs = 1,nrhs = 4;
  mxArray *plhs[1],*prhs[4];
  long long int lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* smuggle the PETSc object pointers to MATLAB as 64-bit integers stored in doubles */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* NOTE(review): plhs[0] appears to carry the MATLAB-side error code, converted via mxGetScalar — confirm intended */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine
   (records the MATLAB function name and installs DMComputeFunction_Matlab as the trampoline)

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
   DMSetJacobianMatlab().
   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode ierr;
  DMMatlabContext *sctx;
  int nlhs = 2,nrhs = 5;
  mxArray *plhs[2],*prhs[5];
  long long int lx = 0,lA = 0,lB = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* smuggle the PETSc object pointers to MATLAB as 64-bit integers stored in doubles */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)lA);
  prhs[3] = mxCreateDoubleScalar((double)lB);
  prhs[4] = mxCreateString(sctx->jacname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* first return is the MatStructure flag, second the MATLAB-side error code */
  *str = (MatStructure) mxGetScalar(plhs[0]);
  ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetJacobianMatlab"
/*
   DMSetJacobianMatlab - Sets the Jacobian function evaluation routine
   (records the MATLAB function name and installs DMComputeJacobian_Matlab as the trampoline)

*/
PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func)
{
  PetscErrorCode ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary or HDF5 format
  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   Defaults to the DM DA.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

   In addition, PETSc automatically does the byte swapping for
machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
linux, Windows and the paragon; thus if you write your own binary
read/write routines you have to swap the bytes; see PetscBinaryRead()
and PetscBinaryWrite() to see how this may be done.
2872 2873 Concepts: vector^loading from file 2874 2875 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2876 @*/ 2877 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2878 { 2879 PetscErrorCode ierr; 2880 2881 PetscFunctionBegin; 2882 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2883 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2884 2885 if (!((PetscObject)newdm)->type_name) { 2886 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2887 } 2888 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2889 PetscFunctionReturn(0); 2890 } 2891 2892 /******************************** FEM Support **********************************/ 2893 2894 #undef __FUNCT__ 2895 #define __FUNCT__ "DMPrintCellVector" 2896 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2897 PetscInt f; 2898 PetscErrorCode ierr; 2899 2900 PetscFunctionBegin; 2901 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2902 for (f = 0; f < len; ++f) { 2903 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2904 } 2905 PetscFunctionReturn(0); 2906 } 2907 2908 #undef __FUNCT__ 2909 #define __FUNCT__ "DMPrintCellMatrix" 2910 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2911 PetscInt f, g; 2912 PetscErrorCode ierr; 2913 2914 PetscFunctionBegin; 2915 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2916 for (f = 0; f < rows; ++f) { 2917 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2918 for (g = 0; g < cols; ++g) { 2919 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2920 } 2921 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2922 } 2923 PetscFunctionReturn(0); 2924 } 2925 2926 #undef __FUNCT__ 2927 #define __FUNCT__ "DMGetLocalFunction" 2928 /*@C 2929 DMGetLocalFunction - Get the local residual 
function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lf - The local residual function

  Calling sequence of lf:
$ lf (DM dm, Vec x, Vec f, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. f   - local vector to put residual in
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* lf may be PETSC_NULL when the caller does not want the pointer back */
  if (lf) *lf = dm->lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetLocalFunction"
/*@C
  DMSetLocalFunction - Set the local residual function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lf - The local residual function

  Calling sequence of lf:
$ lf (DM dm, Vec x, Vec f, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. f   - local vector to put residual in
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  dm->lf = lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalJacobian"
/*@C
  DMGetLocalJacobian - Get the local Jacobian function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
.
lj - The local Jacobian function

  Calling sequence of lj:
$ lj (DM dm, Vec x, Mat J, Mat M, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. J   - matrix to put Jacobian in
. M   - matrix to use for defining Jacobian preconditioner
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* lj may be PETSC_NULL when the caller does not want the pointer back */
  if (lj) *lj = dm->lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetLocalJacobian"
/*@C
  DMSetLocalJacobian - Set the local Jacobian function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lj - The local Jacobian function

  Calling sequence of lj:
$ lj (DM dm, Vec x, Mat J, Mat M, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. J   - matrix to put Jacobian in
. M   - matrix to use for defining Jacobian preconditioner
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  dm->lj = lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
.
section - The PetscSection 3066 3067 Level: intermediate 3068 3069 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3070 3071 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3072 @*/ 3073 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 3074 PetscFunctionBegin; 3075 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3076 PetscValidPointer(section, 2); 3077 *section = dm->defaultSection; 3078 PetscFunctionReturn(0); 3079 } 3080 3081 #undef __FUNCT__ 3082 #define __FUNCT__ "DMSetDefaultSection" 3083 /*@ 3084 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 3085 3086 Input Parameters: 3087 + dm - The DM 3088 - section - The PetscSection 3089 3090 Level: intermediate 3091 3092 Note: Any existing Section will be destroyed 3093 3094 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3095 @*/ 3096 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3097 PetscInt numFields; 3098 PetscInt f; 3099 PetscErrorCode ierr; 3100 3101 PetscFunctionBegin; 3102 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3103 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3104 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3105 dm->defaultSection = section; 3106 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3107 if (numFields) { 3108 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3109 for (f = 0; f < numFields; ++f) { 3110 const char *name; 3111 3112 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3113 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3114 } 3115 } 3116 PetscFunctionReturn(0); 3117 } 3118 3119 #undef __FUNCT__ 3120 #define __FUNCT__ "DMGetDefaultGlobalSection" 3121 /*@ 3122 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3123 3124 Input Parameter: 3125 . dm - The DM 3126 3127 Output Parameter: 3128 . 
section - The PetscSection 3129 3130 Level: intermediate 3131 3132 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3133 3134 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3135 @*/ 3136 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3137 PetscErrorCode ierr; 3138 3139 PetscFunctionBegin; 3140 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3141 PetscValidPointer(section, 2); 3142 if (!dm->defaultGlobalSection) { 3143 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 3144 } 3145 *section = dm->defaultGlobalSection; 3146 PetscFunctionReturn(0); 3147 } 3148 3149 #undef __FUNCT__ 3150 #define __FUNCT__ "DMSetDefaultGlobalSection" 3151 /*@ 3152 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 3153 3154 Input Parameters: 3155 + dm - The DM 3156 - section - The PetscSection 3157 3158 Level: intermediate 3159 3160 Note: Any existing Section will be destroyed 3161 3162 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 3163 @*/ 3164 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) { 3165 PetscErrorCode ierr; 3166 3167 PetscFunctionBegin; 3168 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3169 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3170 dm->defaultGlobalSection = section; 3171 PetscFunctionReturn(0); 3172 } 3173 3174 #undef __FUNCT__ 3175 #define __FUNCT__ "DMGetDefaultSF" 3176 /*@ 3177 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3178 it is created from the default PetscSection layouts in the DM. 3179 3180 Input Parameter: 3181 . dm - The DM 3182 3183 Output Parameter: 3184 . sf - The PetscSF 3185 3186 Level: intermediate 3187 3188 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 
3189 3190 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3191 @*/ 3192 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3193 PetscInt nroots; 3194 PetscErrorCode ierr; 3195 3196 PetscFunctionBegin; 3197 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3198 PetscValidPointer(sf, 2); 3199 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3200 if (nroots < 0) { 3201 PetscSection section, gSection; 3202 3203 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3204 if (section) { 3205 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3206 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3207 } else { 3208 *sf = PETSC_NULL; 3209 PetscFunctionReturn(0); 3210 } 3211 } 3212 *sf = dm->defaultSF; 3213 PetscFunctionReturn(0); 3214 } 3215 3216 #undef __FUNCT__ 3217 #define __FUNCT__ "DMSetDefaultSF" 3218 /*@ 3219 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3220 3221 Input Parameters: 3222 + dm - The DM 3223 - sf - The PetscSF 3224 3225 Level: intermediate 3226 3227 Note: Any previous SF is destroyed 3228 3229 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3230 @*/ 3231 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3232 PetscErrorCode ierr; 3233 3234 PetscFunctionBegin; 3235 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3236 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3237 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3238 dm->defaultSF = sf; 3239 PetscFunctionReturn(0); 3240 } 3241 3242 #undef __FUNCT__ 3243 #define __FUNCT__ "DMCreateDefaultSF" 3244 /*@C 3245 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3246 describing the data layout. 3247 3248 Input Parameters: 3249 + dm - The DM 3250 . 
localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots are the locally owned unconstrained dofs; a PetscLayout over them
     gives the per-rank ownership ranges used below to translate a global
     offset into an (owner rank, index-on-owner) pair */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. The global section encodes a remotely owned
     point with a negative dof stored as -(dof+1) (mirroring the -(goff+1)
     offset decoding below); constrained dofs are excluded in either case. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt dof, cdof;

    ierr = PetscSectionGetDof(globalSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &cdof);CHKERRQ(ierr);
    nleaves += dof < 0 ? -(dof+1)-cdof : dof-cdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local[] with the local offsets of the unconstrained
     dofs of each point, and remote[] with the matching (rank, index) roots */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    PetscInt *cind;
    PetscInt dof, gdof, cdof, dim, off, goff, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    dim = dof-cdof;
    /* Copy the local offsets of point p's dofs, skipping constrained ones
       (cind holds the constrained dof indices in increasing order —
       NOTE(review): the skip logic assumes cind is sorted; confirm) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: goff encodes the true global offset as -(goff+1).
         Find the owning rank by a linear scan over the layout ranges.
         NOTE(review): if offset fell outside every range, r would equal size
         here — presumably impossible for a consistent global section; confirm */
      for (d = 0; d < dim; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        for (r = 0; r < size; ++r) {
          if ((offset >= ranges[r]) && (offset < ranges[r+1])) break;
        }
        remote[l].rank = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root lives on this rank */
      for (d = 0; d < dim; ++d, ++l) {
        remote[l].rank = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3339 3340 .seealso: DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3341 @*/ 3342 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3343 PetscFunctionBegin; 3344 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3345 PetscValidPointer(sf, 2); 3346 *sf = dm->sf; 3347 PetscFunctionReturn(0); 3348 } 3349 3350 #undef __FUNCT__ 3351 #define __FUNCT__ "DMGetNumFields" 3352 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3353 { 3354 PetscFunctionBegin; 3355 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3356 PetscValidPointer(numFields, 2); 3357 *numFields = dm->numFields; 3358 PetscFunctionReturn(0); 3359 } 3360 3361 #undef __FUNCT__ 3362 #define __FUNCT__ "DMSetNumFields" 3363 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3364 { 3365 PetscInt f; 3366 PetscErrorCode ierr; 3367 3368 PetscFunctionBegin; 3369 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3370 for (f = 0; f < dm->numFields; ++f) { 3371 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3372 } 3373 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3374 dm->numFields = numFields; 3375 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3376 for (f = 0; f < dm->numFields; ++f) { 3377 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3378 } 3379 PetscFunctionReturn(0); 3380 } 3381 3382 #undef __FUNCT__ 3383 #define __FUNCT__ "DMGetField" 3384 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3385 { 3386 PetscFunctionBegin; 3387 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3388 PetscValidPointer(field, 2); 3389 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3390 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3391 *field = dm->fields[f]; 3392 PetscFunctionReturn(0); 3393 } 3394