1 #include <petscsnes.h> 2 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = PETSC_NULL; 35 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 36 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 39 #endif 40 41 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 42 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 43 44 45 v->ltogmap = PETSC_NULL; 46 v->ltogmapb = PETSC_NULL; 47 v->bs = 1; 48 v->coloringtype = IS_COLORING_GLOBAL; 49 v->lf = PETSC_NULL; 50 v->lj = PETSC_NULL; 51 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 52 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 53 v->defaultSection = PETSC_NULL; 54 v->defaultGlobalSection = PETSC_NULL; 55 { 56 PetscInt i; 57 for (i = 0; i < 10; ++i) { 58 v->nullspaceConstructors[i] = PETSC_NULL; 59 } 60 } 61 v->numFields = 0; 62 v->fields = PETSC_NULL; 63 64 *dm = v; 65 PetscFunctionReturn(0); 66 } 67 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "DMSetVecType" 71 /*@C 72 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 73 74 Logically 
Collective on DMDA 75 76 Input Parameter: 77 + da - initial distributed array 78 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 79 80 Options Database: 81 . -dm_vec_type ctype 82 83 Level: intermediate 84 85 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 86 @*/ 87 PetscErrorCode DMSetVecType(DM da,VecType ctype) 88 { 89 PetscErrorCode ierr; 90 91 PetscFunctionBegin; 92 PetscValidHeaderSpecific(da,DM_CLASSID,1); 93 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 94 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "DMSetMatType" 100 /*@C 101 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 102 103 Logically Collective on DM 104 105 Input Parameter: 106 + dm - the DM context 107 . ctype - the matrix type 108 109 Options Database: 110 . -dm_mat_type ctype 111 112 Level: intermediate 113 114 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 115 @*/ 116 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 117 { 118 PetscErrorCode ierr; 119 PetscFunctionBegin; 120 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 121 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 122 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 123 PetscFunctionReturn(0); 124 } 125 126 #undef __FUNCT__ 127 #define __FUNCT__ "DMSetOptionsPrefix" 128 /*@C 129 DMSetOptionsPrefix - Sets the prefix used for searching for all 130 DMDA options in the database. 131 132 Logically Collective on DMDA 133 134 Input Parameter: 135 + da - the DMDA context 136 - prefix - the prefix to prepend to all option names 137 138 Notes: 139 A hyphen (-) must NOT be given at the beginning of the prefix name. 140 The first character of all runtime options is AUTOMATICALLY the hyphen. 
141 142 Level: advanced 143 144 .keywords: DMDA, set, options, prefix, database 145 146 .seealso: DMSetFromOptions() 147 @*/ 148 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 149 { 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 154 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 155 PetscFunctionReturn(0); 156 } 157 158 #undef __FUNCT__ 159 #define __FUNCT__ "DMDestroy" 160 /*@ 161 DMDestroy - Destroys a vector packer or DMDA. 162 163 Collective on DM 164 165 Input Parameter: 166 . dm - the DM object to destroy 167 168 Level: developer 169 170 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 171 172 @*/ 173 PetscErrorCode DMDestroy(DM *dm) 174 { 175 PetscInt i, cnt = 0, f; 176 DMNamedVecLink nlink,nnext; 177 PetscErrorCode ierr; 178 179 PetscFunctionBegin; 180 if (!*dm) PetscFunctionReturn(0); 181 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 182 183 /* count all the circular references of DM and its contained Vecs */ 184 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 185 if ((*dm)->localin[i]) {cnt++;} 186 if ((*dm)->globalin[i]) {cnt++;} 187 } 188 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 189 if ((*dm)->x) { 190 PetscObject obj; 191 ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr); 192 if (obj == (PetscObject)*dm) cnt++; 193 } 194 195 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 196 /* 197 Need this test because the dm references the vectors that 198 reference the dm, so destroying the dm calls destroy on the 199 vectors that cause another destroy on the dm 200 */ 201 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 202 ((PetscObject) (*dm))->refct = 0; 203 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 204 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with 
DMGetLocalVector()"); 205 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 206 } 207 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 208 nnext = nlink->next; 209 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 210 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 211 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 212 ierr = PetscFree(nlink);CHKERRQ(ierr); 213 } 214 (*dm)->namedglobal = PETSC_NULL; 215 216 /* Destroy the list of hooks */ 217 { 218 DMCoarsenHookLink link,next; 219 for (link=(*dm)->coarsenhook; link; link=next) { 220 next = link->next; 221 ierr = PetscFree(link);CHKERRQ(ierr); 222 } 223 (*dm)->coarsenhook = PETSC_NULL; 224 } 225 { 226 DMRefineHookLink link,next; 227 for (link=(*dm)->refinehook; link; link=next) { 228 next = link->next; 229 ierr = PetscFree(link);CHKERRQ(ierr); 230 } 231 (*dm)->refinehook = PETSC_NULL; 232 } 233 /* Destroy the work arrays */ 234 { 235 DMWorkLink link,next; 236 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 237 for (link=(*dm)->workin; link; link=next) { 238 next = link->next; 239 ierr = PetscFree(link->mem);CHKERRQ(ierr); 240 ierr = PetscFree(link);CHKERRQ(ierr); 241 } 242 (*dm)->workin = PETSC_NULL; 243 } 244 245 if ((*dm)->ctx && (*dm)->ctxdestroy) { 246 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 247 } 248 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 249 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 250 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 251 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 252 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 253 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 254 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 255 256 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 257 ierr = 
PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 258 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 259 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 260 261 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 262 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 263 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 264 265 for (f = 0; f < (*dm)->numFields; ++f) { 266 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 267 } 268 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 269 /* if memory was published with AMS then destroy it */ 270 ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 271 272 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 273 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 274 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 275 PetscFunctionReturn(0); 276 } 277 278 #undef __FUNCT__ 279 #define __FUNCT__ "DMSetUp" 280 /*@ 281 DMSetUp - sets up the data structures inside a DM object 282 283 Collective on DM 284 285 Input Parameter: 286 . dm - the DM object to setup 287 288 Level: developer 289 290 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 291 292 @*/ 293 PetscErrorCode DMSetUp(DM dm) 294 { 295 PetscErrorCode ierr; 296 297 PetscFunctionBegin; 298 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 299 if (dm->setupcalled) PetscFunctionReturn(0); 300 if (dm->ops->setup) { 301 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 302 } 303 dm->setupcalled = PETSC_TRUE; 304 PetscFunctionReturn(0); 305 } 306 307 #undef __FUNCT__ 308 #define __FUNCT__ "DMSetFromOptions" 309 /*@ 310 DMSetFromOptions - sets parameters in a DM from the options database 311 312 Collective on DM 313 314 Input Parameter: 315 . dm - the DM object to set options for 316 317 Options Database: 318 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 319 . 
-dm_vec_type <type> type of vector to create inside DM 320 . -dm_mat_type <type> type of matrix to create inside DM 321 - -dm_coloring_type <global or ghosted> 322 323 Level: developer 324 325 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 326 327 @*/ 328 PetscErrorCode DMSetFromOptions(DM dm) 329 { 330 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 331 PetscErrorCode ierr; 332 char typeName[256] = MATAIJ; 333 334 PetscFunctionBegin; 335 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 336 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 337 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 338 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 339 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 340 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 341 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 342 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 343 if (flg) { 344 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 345 } 346 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 347 if (flg) { 348 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 349 } 350 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of 
Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 351 if (dm->ops->setfromoptions) { 352 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 353 } 354 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 355 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 356 ierr = PetscOptionsEnd();CHKERRQ(ierr); 357 if (flg1) { 358 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 359 } 360 if (flg2) { 361 PetscViewer viewer; 362 363 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 364 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 365 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 366 ierr = DMView(dm, viewer);CHKERRQ(ierr); 367 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 368 } 369 if (flg3) { 370 PetscViewer viewer; 371 372 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 373 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 374 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 375 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 376 ierr = DMView(dm, viewer);CHKERRQ(ierr); 377 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 378 } 379 if (flg4) { 380 PetscViewer viewer; 381 382 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 383 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 384 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 385 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 386 ierr = DMView(dm, viewer);CHKERRQ(ierr); 387 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 388 } 389 PetscFunctionReturn(0); 390 } 391 392 #undef __FUNCT__ 393 #define __FUNCT__ "DMView" 394 /*@C 395 DMView - Views a vector packer or DMDA. 
396 397 Collective on DM 398 399 Input Parameter: 400 + dm - the DM object to view 401 - v - the viewer 402 403 Level: developer 404 405 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 406 407 @*/ 408 PetscErrorCode DMView(DM dm,PetscViewer v) 409 { 410 PetscErrorCode ierr; 411 412 PetscFunctionBegin; 413 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 414 if (!v) { 415 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 416 } 417 if (dm->ops->view) { 418 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 419 } 420 PetscFunctionReturn(0); 421 } 422 423 #undef __FUNCT__ 424 #define __FUNCT__ "DMCreateGlobalVector" 425 /*@ 426 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 427 428 Collective on DM 429 430 Input Parameter: 431 . dm - the DM object 432 433 Output Parameter: 434 . vec - the global vector 435 436 Level: beginner 437 438 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 439 440 @*/ 441 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 442 { 443 PetscErrorCode ierr; 444 445 PetscFunctionBegin; 446 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 447 if (dm->defaultSection) { 448 PetscSection gSection; 449 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 450 451 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 452 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 453 for(p = pStart; p < pEnd; ++p) { 454 PetscInt dof, cdof; 455 456 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 457 ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr); 458 if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof; 459 if ((dof > 0) && (dof-cdof != blockSize)) { 460 blockSize = 1; 461 break; 462 } 463 } 464 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 465 if (localSize%blockSize) 
SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize); 466 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 467 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 468 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 469 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 470 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 471 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 472 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 473 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 474 /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 475 } else { 476 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 477 } 478 PetscFunctionReturn(0); 479 } 480 481 #undef __FUNCT__ 482 #define __FUNCT__ "DMCreateLocalVector" 483 /*@ 484 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 485 486 Not Collective 487 488 Input Parameter: 489 . dm - the DM object 490 491 Output Parameter: 492 . 
vec - the local vector 493 494 Level: beginner 495 496 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 497 498 @*/ 499 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 500 { 501 PetscErrorCode ierr; 502 503 PetscFunctionBegin; 504 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 505 if (dm->defaultSection) { 506 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 507 508 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 509 for(p = pStart; p < pEnd; ++p) { 510 PetscInt dof; 511 512 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 513 if ((blockSize < 0) && (dof > 0)) blockSize = dof; 514 if ((dof > 0) && (dof != blockSize)) { 515 blockSize = 1; 516 break; 517 } 518 } 519 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 520 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 521 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 522 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 523 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 524 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 525 } else { 526 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 527 } 528 PetscFunctionReturn(0); 529 } 530 531 #undef __FUNCT__ 532 #define __FUNCT__ "DMGetLocalToGlobalMapping" 533 /*@ 534 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 535 536 Collective on DM 537 538 Input Parameter: 539 . dm - the DM that provides the mapping 540 541 Output Parameter: 542 . ltog - the mapping 543 544 Level: intermediate 545 546 Notes: 547 This mapping can then be used by VecSetLocalToGlobalMapping() or 548 MatSetLocalToGlobalMapping(). 
549 550 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 551 @*/ 552 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 553 { 554 PetscErrorCode ierr; 555 556 PetscFunctionBegin; 557 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 558 PetscValidPointer(ltog,2); 559 if (!dm->ltogmap) { 560 PetscSection section, sectionGlobal; 561 562 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 563 if (section) { 564 PetscInt *ltog; 565 PetscInt pStart, pEnd, size, p, l; 566 567 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 568 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 569 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 570 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 571 for (p = pStart, l = 0; p < pEnd; ++p) { 572 PetscInt dof, off, c; 573 574 /* Should probably use constrained dofs */ 575 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 576 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 577 for (c = 0; c < dof; ++c, ++l) { 578 ltog[l] = off+c; 579 } 580 } 581 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 582 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 583 } else { 584 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 585 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 586 } 587 } 588 *ltog = dm->ltogmap; 589 PetscFunctionReturn(0); 590 } 591 592 #undef __FUNCT__ 593 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 594 /*@ 595 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 596 597 Collective on DM 598 599 Input Parameter: 600 . da - the distributed array that provides the mapping 601 602 Output Parameter: 603 . 
ltog - the block mapping 604 605 Level: intermediate 606 607 Notes: 608 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 609 MatSetLocalToGlobalMappingBlock(). 610 611 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 612 @*/ 613 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 614 { 615 PetscErrorCode ierr; 616 617 PetscFunctionBegin; 618 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 619 PetscValidPointer(ltog,2); 620 if (!dm->ltogmapb) { 621 PetscInt bs; 622 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 623 if (bs > 1) { 624 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 625 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 626 } else { 627 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 628 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 629 } 630 } 631 *ltog = dm->ltogmapb; 632 PetscFunctionReturn(0); 633 } 634 635 #undef __FUNCT__ 636 #define __FUNCT__ "DMGetBlockSize" 637 /*@ 638 DMGetBlockSize - Gets the inherent block size associated with a DM 639 640 Not Collective 641 642 Input Parameter: 643 . dm - the DM with block structure 644 645 Output Parameter: 646 . 
bs - the block size, 1 implies no exploitable block structure 647 648 Level: intermediate 649 650 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 651 @*/ 652 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 653 { 654 PetscFunctionBegin; 655 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 656 PetscValidPointer(bs,2); 657 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 658 *bs = dm->bs; 659 PetscFunctionReturn(0); 660 } 661 662 #undef __FUNCT__ 663 #define __FUNCT__ "DMCreateInterpolation" 664 /*@ 665 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 666 667 Collective on DM 668 669 Input Parameter: 670 + dm1 - the DM object 671 - dm2 - the second, finer DM object 672 673 Output Parameter: 674 + mat - the interpolation 675 - vec - the scaling (optional) 676 677 Level: developer 678 679 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 680 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 681 682 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 683 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
684 685 686 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 687 688 @*/ 689 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 690 { 691 PetscErrorCode ierr; 692 693 PetscFunctionBegin; 694 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 695 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 696 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 697 PetscFunctionReturn(0); 698 } 699 700 #undef __FUNCT__ 701 #define __FUNCT__ "DMCreateInjection" 702 /*@ 703 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 704 705 Collective on DM 706 707 Input Parameter: 708 + dm1 - the DM object 709 - dm2 - the second, finer DM object 710 711 Output Parameter: 712 . ctx - the injection 713 714 Level: developer 715 716 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 717 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 718 719 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 720 721 @*/ 722 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 723 { 724 PetscErrorCode ierr; 725 726 PetscFunctionBegin; 727 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 728 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 729 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 730 PetscFunctionReturn(0); 731 } 732 733 #undef __FUNCT__ 734 #define __FUNCT__ "DMCreateColoring" 735 /*@C 736 DMCreateColoring - Gets coloring for a DMDA or DMComposite 737 738 Collective on DM 739 740 Input Parameter: 741 + dm - the DM object 742 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 743 - matype - either MATAIJ or MATBAIJ 744 745 Output Parameter: 746 . 
coloring - the coloring 747 748 Level: developer 749 750 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 751 752 @*/ 753 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 754 { 755 PetscErrorCode ierr; 756 757 PetscFunctionBegin; 758 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 759 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 760 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 761 PetscFunctionReturn(0); 762 } 763 764 #undef __FUNCT__ 765 #define __FUNCT__ "DMCreateMatrix" 766 /*@C 767 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 768 769 Collective on DM 770 771 Input Parameter: 772 + dm - the DM object 773 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 774 any type which inherits from one of these (such as MATAIJ) 775 776 Output Parameter: 777 . mat - the empty Jacobian 778 779 Level: beginner 780 781 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 782 do not need to do it yourself. 783 784 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 785 the nonzero pattern call DMDASetMatPreallocateOnly() 786 787 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 788 internally by PETSc. 789 790 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 791 the indices for the global numbering for DMDAs which is complicated. 
792 793 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 794 795 @*/ 796 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 797 { 798 PetscErrorCode ierr; 799 800 PetscFunctionBegin; 801 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 802 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 803 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 804 #endif 805 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 806 PetscValidPointer(mat,3); 807 if (dm->mattype) { 808 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 809 } else { 810 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 811 } 812 PetscFunctionReturn(0); 813 } 814 815 #undef __FUNCT__ 816 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 817 /*@ 818 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 819 preallocated but the nonzero structure and zero values will not be set. 820 821 Logically Collective on DMDA 822 823 Input Parameter: 824 + dm - the DM 825 - only - PETSC_TRUE if only want preallocation 826 827 Level: developer 828 .seealso DMCreateMatrix() 829 @*/ 830 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 831 { 832 PetscFunctionBegin; 833 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 834 dm->prealloc_only = only; 835 PetscFunctionReturn(0); 836 } 837 838 #undef __FUNCT__ 839 #define __FUNCT__ "DMGetWorkArray" 840 /*@C 841 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 842 843 Not Collective 844 845 Input Parameters: 846 + dm - the DM object 847 . count - The minium size 848 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 849 850 Output Parameter: 851 . 
array - the work array 852 853 Level: developer 854 855 .seealso DMDestroy(), DMCreate() 856 @*/ 857 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 858 { 859 PetscErrorCode ierr; 860 DMWorkLink link; 861 size_t size; 862 863 PetscFunctionBegin; 864 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 865 PetscValidPointer(mem,4); 866 if (dm->workin) { 867 link = dm->workin; 868 dm->workin = dm->workin->next; 869 } else { 870 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 871 } 872 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 873 if (size*count > link->bytes) { 874 ierr = PetscFree(link->mem);CHKERRQ(ierr); 875 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 876 link->bytes = size*count; 877 } 878 link->next = dm->workout; 879 dm->workout = link; 880 *(void**)mem = link->mem; 881 PetscFunctionReturn(0); 882 } 883 884 #undef __FUNCT__ 885 #define __FUNCT__ "DMRestoreWorkArray" 886 /*@C 887 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 888 889 Not Collective 890 891 Input Parameters: 892 + dm - the DM object 893 . count - The minium size 894 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 895 896 Output Parameter: 897 . 
array - the work array 898 899 Level: developer 900 901 .seealso DMDestroy(), DMCreate() 902 @*/ 903 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 904 { 905 DMWorkLink *p,link; 906 907 PetscFunctionBegin; 908 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 909 PetscValidPointer(mem,4); 910 for (p=&dm->workout; (link=*p); p=&link->next) { 911 if (link->mem == *(void**)mem) { 912 *p = link->next; 913 link->next = dm->workin; 914 dm->workin = link; 915 *(void**)mem = PETSC_NULL; 916 PetscFunctionReturn(0); 917 } 918 } 919 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 920 PetscFunctionReturn(0); 921 } 922 923 #undef __FUNCT__ 924 #define __FUNCT__ "DMSetNullSpaceConstructor" 925 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 926 { 927 PetscFunctionBegin; 928 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 929 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 930 dm->nullspaceConstructors[field] = nullsp; 931 PetscFunctionReturn(0); 932 } 933 934 #undef __FUNCT__ 935 #define __FUNCT__ "DMCreateFieldIS" 936 /*@C 937 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 938 939 Not collective 940 941 Input Parameter: 942 . dm - the DM object 943 944 Output Parameters: 945 + numFields - The number of fields (or PETSC_NULL if not requested) 946 . fieldNames - The name for each field (or PETSC_NULL if not requested) 947 - fields - The global indices for each field (or PETSC_NULL if not requested) 948 949 Level: intermediate 950 951 Notes: 952 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 953 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 954 PetscFree(). 
955 956 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 957 @*/ 958 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 959 { 960 PetscSection section, sectionGlobal; 961 PetscErrorCode ierr; 962 963 PetscFunctionBegin; 964 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 965 if (numFields) { 966 PetscValidPointer(numFields,2); 967 *numFields = 0; 968 } 969 if (fieldNames) { 970 PetscValidPointer(fieldNames,3); 971 *fieldNames = PETSC_NULL; 972 } 973 if (fields) { 974 PetscValidPointer(fields,4); 975 *fields = PETSC_NULL; 976 } 977 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 978 if (section) { 979 PetscInt *fieldSizes, **fieldIndices; 980 PetscInt nF, f, pStart, pEnd, p; 981 982 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 983 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 984 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 985 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 986 for (f = 0; f < nF; ++f) { 987 fieldSizes[f] = 0; 988 } 989 for (p = pStart; p < pEnd; ++p) { 990 PetscInt gdof; 991 992 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 993 if (gdof > 0) { 994 for (f = 0; f < nF; ++f) { 995 PetscInt fdof, fcdof; 996 997 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 998 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 999 fieldSizes[f] += fdof-fcdof; 1000 } 1001 } 1002 } 1003 for (f = 0; f < nF; ++f) { 1004 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1005 fieldSizes[f] = 0; 1006 } 1007 for (p = pStart; p < pEnd; ++p) { 1008 PetscInt gdof, goff; 1009 1010 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1011 if (gdof > 0) { 1012 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1013 for (f = 0; f < nF; ++f) { 1014 PetscInt fdof, 
fcdof, fc; 1015 1016 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1017 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1018 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1019 fieldIndices[f][fieldSizes[f]] = goff++; 1020 } 1021 } 1022 } 1023 } 1024 if (numFields) {*numFields = nF;} 1025 if (fieldNames) { 1026 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 1027 for (f = 0; f < nF; ++f) { 1028 const char *fieldName; 1029 1030 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1031 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 1032 } 1033 } 1034 if (fields) { 1035 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1036 for (f = 0; f < nF; ++f) { 1037 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1038 } 1039 } 1040 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1041 } else { 1042 if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1043 } 1044 PetscFunctionReturn(0); 1045 } 1046 1047 #undef __FUNCT__ 1048 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1049 /*@C 1050 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1051 1052 Not Collective 1053 1054 Input Parameters: 1055 + dm - the DM object 1056 - name - the name of the field decomposition 1057 1058 Output Parameter: 1059 . 
    ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known)

  Level: advanced

.seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
@*/
PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidCharPointer(name,2);
  PetscValidPointer(ddm,3);
  *ddm = PETSC_NULL;
  /* Optional implementation hook: a type with no such decomposition simply leaves *ddm NULL */
  if (dm->ops->createfielddecompositiondm) {
    ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldDecomposition"
/*@C
  DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
                          corresponding to different fields: each IS contains the global indices of the dofs of the
                          corresponding field. The optional list of DMs define the DM for each subproblem.
                          Generalizes DMCreateFieldIS().

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len      - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
. namelist - The name for each field (or PETSC_NULL if not requested)
. islist   - The global indices for each field (or PETSC_NULL if not requested)
- dmlist   - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().
1105 1106 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1107 @*/ 1108 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1109 { 1110 PetscErrorCode ierr; 1111 1112 PetscFunctionBegin; 1113 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1114 if (len) {PetscValidPointer(len,2); *len = 0;} 1115 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1116 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1117 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1118 if (!dm->ops->createfielddecomposition) { 1119 PetscSection section; 1120 PetscInt numFields, f; 1121 1122 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1123 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1124 if (section && numFields && dm->ops->createsubdm) { 1125 *len = numFields; 1126 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1127 for (f = 0; f < numFields; ++f) { 1128 const char *fieldName; 1129 1130 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1131 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1132 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1133 } 1134 } else { 1135 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1136 /* By default there are no DMs associated with subproblems. */ 1137 if (dmlist) *dmlist = PETSC_NULL; 1138 } 1139 } 1140 else { 1141 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1142 } 1143 PetscFunctionReturn(0); 1144 } 1145 1146 #undef __FUNCT__ 1147 #define __FUNCT__ "DMCreateSubDM" 1148 /*@C 1149 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1150 The fields are defined by DMCreateFieldIS(). 

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - the number of fields in the subproblem
- fields - the fields of the subproblem, an array of length numFields

  Output Parameters:
+ is - The global indices for the subproblem
- subdm - The DM for the subproblem

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  /* No default implementation exists: the DM type must supply one */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionDM"
/*@C
  DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.

  Not Collective

  Input Parameters:
+ dm - the DM object
- name - the name of the subdomain decomposition

  Output Parameter:
.
    ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)

  Level: advanced

.seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecomposition()
@*/
PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidCharPointer(name,2);
  PetscValidPointer(ddm,3);
  *ddm = PETSC_NULL;
  /* Optional implementation hook: a type with no known decomposition leaves *ddm NULL */
  if (dm->ops->createdomaindecompositiondm) {
    ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs of nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
. namelist    - The name for each subdomain (or PETSC_NULL if not requested)
. innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays.
In particular, every entry of names should be freed with 1240 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1241 and all of the arrays should be freed with PetscFree(). 1242 1243 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1244 @*/ 1245 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1246 { 1247 PetscErrorCode ierr; 1248 1249 PetscFunctionBegin; 1250 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1251 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1252 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1253 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1254 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1255 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1256 if (dm->ops->createdomaindecomposition) { 1257 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1258 } 1259 PetscFunctionReturn(0); 1260 } 1261 1262 #undef __FUNCT__ 1263 #define __FUNCT__ "DMRefine" 1264 /*@ 1265 DMRefine - Refines a DM object 1266 1267 Collective on DM 1268 1269 Input Parameter: 1270 + dm - the DM object 1271 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1272 1273 Output Parameter: 1274 . 
    dmf - the refined DM, or PETSC_NULL

  Note: If no refinement was done, the return value is PETSC_NULL

  Level: developer

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
@*/
PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
  if (*dmf) {
    /* The fine DM inherits the user's problem-definition callbacks from the coarse DM */
    (*dmf)->ops->creatematrix = dm->ops->creatematrix;
    (*dmf)->ops->initialguess = dm->ops->initialguess;
    (*dmf)->ops->function     = dm->ops->function;
    (*dmf)->ops->functionj    = dm->ops->functionj;
    /* Keep the fine DM's default (coloring-based) Jacobian unless the user installed a custom one */
    if (dm->ops->jacobian != DMComputeJacobianDefault) {
      (*dmf)->ops->jacobian = dm->ops->jacobian;
    }
    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
    (*dmf)->ctx       = dm->ctx;
    (*dmf)->leveldown = dm->leveldown;
    (*dmf)->levelup   = dm->levelup + 1;
    ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
    /* Run user refine hooks in registration order */
    for (link=dm->refinehook; link; link=link->next) {
      if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
  DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

  Logically Collective

  Input Arguments:
+ coarse - coarse DM on which to run a hook when it is refined
. refinehook - function to run when setting up a finer level
.
    interphook - function to run to update data on finer levels (once per SNESSolve())
- ctx - [optional] user-defined context providing data for the hooks (may be PETSC_NULL)

  Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+ coarse - coarse level DM
. fine - fine level DM to interpolate problem to
- ctx - optional user-defined function context

  Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+ coarse - coarse level DM
. interp - matrix interpolating a coarse-level solution to the finer grid
. fine - fine level DM to update
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

  If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx        = ctx;
  link->next       = PETSC_NULL; /* append at the tail so hooks run in registration order */
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
  DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

  Collective if any
   hooks are registered

  Input Arguments:
+ coarse - coarser DM to use as a base
. interp - interpolation matrix, apply using MatInterpolate()
- fine - finer DM to update

  Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Run the interpolation hooks registered on the fine DM, in registration order */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
  DMGetRefineLevel - Gets the number of refinements that have generated this DM.

  Not Collective

  Input Parameter:
. dm - the DM object

  Output Parameter:
. level - number of refinements

  Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
  DMGlobalToLocalBegin - Begins updating local vectors from global vector

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. g - the global vector
.
   mode - INSERT_VALUES or ADD_VALUES
- l - the local vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only supports insertion; accumulation is not implemented here */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    /* NOTE(review): the arrays are restored before the broadcast completes in
       DMGlobalToLocalEnd(), which re-acquires them; this assumes the underlying
       storage is not invalidated between Begin and End -- confirm. */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for implementations that do not distinguish them */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
  DMGlobalToLocalEnd - Ends updating local vectors from global vector

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. g - the global vector
.
   mode - INSERT_VALUES or ADD_VALUES
- l - the local vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  PetscScalar    *lArray, *gArray;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    /* Complete the broadcast started in DMGlobalToLocalBegin() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for implementations that do not distinguish them */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
  DMLocalToGlobalBegin - updates global vectors from local vectors

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. l - the local vector
. mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
         base point.
- g - the global vector

  Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation.
  If you would like to simply add the non-ghosted values in the local array into the global array you need to either (1) zero the ghosted locations and
  use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work global array to the final global array with VecAXPY().

  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Translate the InsertMode into the MPI reduction used by the star forest */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes for implementations that do not distinguish them */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
  DMLocalToGlobalEnd - updates global vectors from local vectors

  Neighbor-wise Collective on DM

  Input Parameters:
+ dm - the DM object
. l - the local vector
. mode - INSERT_VALUES or ADD_VALUES
- g - the global vector


  Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must recompute the same MPI op chosen in DMLocalToGlobalBegin() */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* Complete the reduction started in DMLocalToGlobalBegin() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
  DMComputeJacobianDefault - computes the Jacobian by finite-difference coloring when no Jacobian routine is provided

  Collective on DM

  Input Parameters:
+ dm - the DM object
. x - location to compute Jacobian at; may be ignored for linear problems
. A - matrix that defines the operator for the linear solve
- B - the matrix used to construct the preconditioner

  Output Parameter:
. stflag - set to SAME_NONZERO_PATTERN

  Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* NOTE(review): assumes dm->fd (the MatFDColoring object) was created by the caller -- confirm */
  ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
  DMCoarsen - Coarsens a DM object

  Collective on DM

  Input Parameters:
+ dm - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
.
    dmc - the coarsened DM

  Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  /* NOTE(review): unlike DMRefine(), *dmc is dereferenced without a NULL check here;
     this assumes every coarsen implementation always produces a DM -- confirm. */
  /* The coarse DM inherits the user's problem-definition callbacks from the fine DM */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  (*dmc)->ops->initialguess = dm->ops->initialguess;
  (*dmc)->ops->function     = dm->ops->function;
  (*dmc)->ops->functionj    = dm->ops->functionj;
  /* Keep the coarse DM's default (coloring-based) Jacobian unless the user installed a custom one */
  if (dm->ops->jacobian != DMComputeJacobianDefault) {
    (*dmc)->ops->jacobian = dm->ops->jacobian;
  }
  ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx       = dm->ctx;
  (*dmc)->levelup   = dm->levelup;
  (*dmc)->leveldown = dm->leveldown + 1;
  ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run user coarsen hooks in registration order */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHookAdd"
/*@
  DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

  Logically Collective

  Input Arguments:
+ fine - fine DM on which to run a hook when restricting to a coarser level
. coarsenhook - function to run when setting up a coarser level
. restricthook - function to run to update data on coarser levels (once per SNESSolve())
- ctx - [optional] user-defined context providing data for the hooks (may be PETSC_NULL)

  Calling sequence of coarsenhook:
$    coarsenhook(DM fine,DM coarse,void *ctx);

+ fine - fine level DM
.
   coarse - coarse level DM to restrict problem to
- ctx - optional user-defined function context

  Calling sequence for restricthook:
$    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)

+ fine - fine level DM
. mrestrict - matrix restricting a fine-level solution to the coarse grid
. rscale - scaling vector for restriction
. inject - matrix restricting by injection
. coarse - coarse level DM to update
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be set up on coarse grids.

  If this function is called multiple times, the hooks will be run in the order they are added.

  In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
  extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fine,DM_CLASSID,1);
  for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
  link->coarsenhook  = coarsenhook;
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = PETSC_NULL; /* append at the tail so hooks run in registration order */
  *p = link;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
  DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

  Collective
   if any hooks are registered

  Input Arguments:
+ fine - finer DM to use as a base
. restrct - restriction matrix, apply using MatRestrict()
. rscale - scaling vector for restriction
. inject - injection matrix, also use MatRestrict()
- coarse - coarser DM to update

  Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run the restriction hooks registered on the fine DM, in registration order */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrictHookAdd"
/*@
  DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to a block of the DM

  Logically Collective

  Input Arguments:
+ global - global DM
. restricthook - function to run to update data on block solve (at the beginning of the block solve)
- ctx - [optional] user-defined context providing data for the hooks (may be PETSC_NULL)

  Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+ global - global DM
. out - scatter to the outer (with ghost and overlap points) block vector
. in - scatter to block vector values only owned locally
. block - block DM (may just be a shell if the global DM is passed in correctly)
- ctx - optional user-defined function context

  Level: advanced

  Notes:
  This function is only needed if auxiliary data needs to be set up on coarse grids.

  If this function is called multiple times, the hooks will be run in the order they are added.
1812 1813 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1814 extract the finest level information from its context (instead of from the SNES). 1815 1816 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1817 @*/ 1818 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1819 { 1820 PetscErrorCode ierr; 1821 DMBlockRestrictHookLink link,*p; 1822 1823 PetscFunctionBegin; 1824 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1825 for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1826 ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr); 1827 link->restricthook = restricthook; 1828 link->ctx = ctx; 1829 link->next = PETSC_NULL; 1830 *p = link; 1831 PetscFunctionReturn(0); 1832 } 1833 1834 #undef __FUNCT__ 1835 #define __FUNCT__ "DMBlockRestrict" 1836 /*@ 1837 DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd() 1838 1839 Collective if any hooks are 1840 1841 Input Arguments: 1842 + fine - finer DM to use as a base 1843 . restrct - restriction matrix, apply using MatRestrict() 1844 . 
inject - injection matrix, also use MatRestrict() 1845 - coarse - coarer DM to update 1846 1847 Level: developer 1848 1849 .seealso: DMCoarsenHookAdd(), MatRestrict() 1850 @*/ 1851 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block) 1852 { 1853 PetscErrorCode ierr; 1854 DMBlockRestrictHookLink link; 1855 1856 PetscFunctionBegin; 1857 for (link=global->blockrestricthook; link; link=link->next) { 1858 if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);} 1859 } 1860 PetscFunctionReturn(0); 1861 } 1862 1863 #undef __FUNCT__ 1864 #define __FUNCT__ "DMGetCoarsenLevel" 1865 /*@ 1866 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1867 1868 Not Collective 1869 1870 Input Parameter: 1871 . dm - the DM object 1872 1873 Output Parameter: 1874 . level - number of coarsenings 1875 1876 Level: developer 1877 1878 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1879 1880 @*/ 1881 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1882 { 1883 PetscFunctionBegin; 1884 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1885 *level = dm->leveldown; 1886 PetscFunctionReturn(0); 1887 } 1888 1889 1890 1891 #undef __FUNCT__ 1892 #define __FUNCT__ "DMRefineHierarchy" 1893 /*@C 1894 DMRefineHierarchy - Refines a DM object, all levels at once 1895 1896 Collective on DM 1897 1898 Input Parameter: 1899 + dm - the DM object 1900 - nlevels - the number of levels of refinement 1901 1902 Output Parameter: 1903 . 
dmf - the refined DM hierarchy 1904 1905 Level: developer 1906 1907 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1908 1909 @*/ 1910 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1911 { 1912 PetscErrorCode ierr; 1913 1914 PetscFunctionBegin; 1915 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1916 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1917 if (nlevels == 0) PetscFunctionReturn(0); 1918 if (dm->ops->refinehierarchy) { 1919 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1920 } else if (dm->ops->refine) { 1921 PetscInt i; 1922 1923 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1924 for (i=1; i<nlevels; i++) { 1925 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1926 } 1927 } else { 1928 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1929 } 1930 PetscFunctionReturn(0); 1931 } 1932 1933 #undef __FUNCT__ 1934 #define __FUNCT__ "DMCoarsenHierarchy" 1935 /*@C 1936 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1937 1938 Collective on DM 1939 1940 Input Parameter: 1941 + dm - the DM object 1942 - nlevels - the number of levels of coarsening 1943 1944 Output Parameter: 1945 . 
dmc - the coarsened DM hierarchy 1946 1947 Level: developer 1948 1949 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1950 1951 @*/ 1952 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 1953 { 1954 PetscErrorCode ierr; 1955 1956 PetscFunctionBegin; 1957 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1958 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1959 if (nlevels == 0) PetscFunctionReturn(0); 1960 PetscValidPointer(dmc,3); 1961 if (dm->ops->coarsenhierarchy) { 1962 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 1963 } else if (dm->ops->coarsen) { 1964 PetscInt i; 1965 1966 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 1967 for (i=1; i<nlevels; i++) { 1968 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 1969 } 1970 } else { 1971 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 1972 } 1973 PetscFunctionReturn(0); 1974 } 1975 1976 #undef __FUNCT__ 1977 #define __FUNCT__ "DMCreateAggregates" 1978 /*@ 1979 DMCreateAggregates - Gets the aggregates that map between 1980 grids associated with two DMs. 1981 1982 Collective on DM 1983 1984 Input Parameters: 1985 + dmc - the coarse grid DM 1986 - dmf - the fine grid DM 1987 1988 Output Parameters: 1989 . 
rest - the restriction matrix (transpose of the projection matrix) 1990 1991 Level: intermediate 1992 1993 .keywords: interpolation, restriction, multigrid 1994 1995 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 1996 @*/ 1997 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 1998 { 1999 PetscErrorCode ierr; 2000 2001 PetscFunctionBegin; 2002 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2003 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2004 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2005 PetscFunctionReturn(0); 2006 } 2007 2008 #undef __FUNCT__ 2009 #define __FUNCT__ "DMSetApplicationContextDestroy" 2010 /*@C 2011 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2012 2013 Not Collective 2014 2015 Input Parameters: 2016 + dm - the DM object 2017 - destroy - the destroy function 2018 2019 Level: intermediate 2020 2021 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2022 2023 @*/ 2024 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2025 { 2026 PetscFunctionBegin; 2027 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2028 dm->ctxdestroy = destroy; 2029 PetscFunctionReturn(0); 2030 } 2031 2032 #undef __FUNCT__ 2033 #define __FUNCT__ "DMSetApplicationContext" 2034 /*@ 2035 DMSetApplicationContext - Set a user context into a DM object 2036 2037 Not Collective 2038 2039 Input Parameters: 2040 + dm - the DM object 2041 - ctx - the user context 2042 2043 Level: intermediate 2044 2045 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2046 2047 @*/ 2048 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2049 { 2050 PetscFunctionBegin; 2051 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2052 dm->ctx = ctx; 2053 PetscFunctionReturn(0); 2054 } 2055 
2056 #undef __FUNCT__ 2057 #define __FUNCT__ "DMGetApplicationContext" 2058 /*@ 2059 DMGetApplicationContext - Gets a user context from a DM object 2060 2061 Not Collective 2062 2063 Input Parameter: 2064 . dm - the DM object 2065 2066 Output Parameter: 2067 . ctx - the user context 2068 2069 Level: intermediate 2070 2071 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2072 2073 @*/ 2074 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2075 { 2076 PetscFunctionBegin; 2077 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2078 *(void**)ctx = dm->ctx; 2079 PetscFunctionReturn(0); 2080 } 2081 2082 #undef __FUNCT__ 2083 #define __FUNCT__ "DMSetInitialGuess" 2084 /*@C 2085 DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers 2086 2087 Logically Collective on DM 2088 2089 Input Parameter: 2090 + dm - the DM object to destroy 2091 - f - the function to compute the initial guess 2092 2093 Level: intermediate 2094 2095 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2096 2097 @*/ 2098 PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec)) 2099 { 2100 PetscFunctionBegin; 2101 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2102 dm->ops->initialguess = f; 2103 PetscFunctionReturn(0); 2104 } 2105 2106 #undef __FUNCT__ 2107 #define __FUNCT__ "DMSetFunction" 2108 /*@C 2109 DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES 2110 2111 Logically Collective on DM 2112 2113 Input Parameter: 2114 + dm - the DM object 2115 - f - the function to compute (use PETSC_NULL to cancel a previous function that was set) 2116 2117 Level: intermediate 2118 2119 Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite 
differences if no Jacobian 2120 computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian. 2121 2122 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2123 DMSetJacobian() 2124 2125 @*/ 2126 PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2127 { 2128 PetscFunctionBegin; 2129 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2130 dm->ops->function = f; 2131 if (f) { 2132 dm->ops->functionj = f; 2133 } 2134 PetscFunctionReturn(0); 2135 } 2136 2137 #undef __FUNCT__ 2138 #define __FUNCT__ "DMSetJacobian" 2139 /*@C 2140 DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES 2141 2142 Logically Collective on DM 2143 2144 Input Parameter: 2145 + dm - the DM object to destroy 2146 - f - the function to compute the matrix entries 2147 2148 Level: intermediate 2149 2150 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2151 DMSetFunction() 2152 2153 @*/ 2154 PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*)) 2155 { 2156 PetscFunctionBegin; 2157 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2158 dm->ops->jacobian = f; 2159 PetscFunctionReturn(0); 2160 } 2161 2162 #undef __FUNCT__ 2163 #define __FUNCT__ "DMSetVariableBounds" 2164 /*@C 2165 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 
2166 2167 Logically Collective on DM 2168 2169 Input Parameter: 2170 + dm - the DM object 2171 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2172 2173 Level: intermediate 2174 2175 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2176 DMSetJacobian() 2177 2178 @*/ 2179 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2180 { 2181 PetscFunctionBegin; 2182 dm->ops->computevariablebounds = f; 2183 PetscFunctionReturn(0); 2184 } 2185 2186 #undef __FUNCT__ 2187 #define __FUNCT__ "DMHasVariableBounds" 2188 /*@ 2189 DMHasVariableBounds - does the DM object have a variable bounds function? 2190 2191 Not Collective 2192 2193 Input Parameter: 2194 . dm - the DM object to destroy 2195 2196 Output Parameter: 2197 . flg - PETSC_TRUE if the variable bounds function exists 2198 2199 Level: developer 2200 2201 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2202 2203 @*/ 2204 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2205 { 2206 PetscFunctionBegin; 2207 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2208 PetscFunctionReturn(0); 2209 } 2210 2211 #undef __FUNCT__ 2212 #define __FUNCT__ "DMComputeVariableBounds" 2213 /*@C 2214 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2215 2216 Logically Collective on DM 2217 2218 Input Parameters: 2219 + dm - the DM object to destroy 2220 - x - current solution at which the bounds are computed 2221 2222 Output parameters: 2223 + xl - lower bound 2224 - xu - upper bound 2225 2226 Level: intermediate 2227 2228 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2229 DMSetFunction(), DMSetVariableBounds() 2230 2231 @*/ 2232 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2233 { 2234 PetscErrorCode ierr; 2235 PetscFunctionBegin; 2236 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2237 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2238 if (dm->ops->computevariablebounds) { 2239 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2240 } 2241 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2242 PetscFunctionReturn(0); 2243 } 2244 2245 #undef __FUNCT__ 2246 #define __FUNCT__ "DMComputeInitialGuess" 2247 /*@ 2248 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2249 2250 Collective on DM 2251 2252 Input Parameter: 2253 + dm - the DM object to destroy 2254 - x - the vector to hold the initial guess values 2255 2256 Level: developer 2257 2258 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2259 2260 @*/ 2261 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2262 { 2263 PetscErrorCode ierr; 2264 PetscFunctionBegin; 2265 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2266 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2267 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2268 PetscFunctionReturn(0); 2269 } 2270 2271 #undef __FUNCT__ 2272 #define __FUNCT__ "DMHasInitialGuess" 2273 /*@ 2274 
DMHasInitialGuess - does the DM object have an initial guess function 2275 2276 Not Collective 2277 2278 Input Parameter: 2279 . dm - the DM object to destroy 2280 2281 Output Parameter: 2282 . flg - PETSC_TRUE if function exists 2283 2284 Level: developer 2285 2286 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2287 2288 @*/ 2289 PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg) 2290 { 2291 PetscFunctionBegin; 2292 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2293 *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE; 2294 PetscFunctionReturn(0); 2295 } 2296 2297 #undef __FUNCT__ 2298 #define __FUNCT__ "DMHasFunction" 2299 /*@ 2300 DMHasFunction - does the DM object have a function 2301 2302 Not Collective 2303 2304 Input Parameter: 2305 . dm - the DM object to destroy 2306 2307 Output Parameter: 2308 . flg - PETSC_TRUE if function exists 2309 2310 Level: developer 2311 2312 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2313 2314 @*/ 2315 PetscErrorCode DMHasFunction(DM dm,PetscBool *flg) 2316 { 2317 PetscFunctionBegin; 2318 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2319 *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE; 2320 PetscFunctionReturn(0); 2321 } 2322 2323 #undef __FUNCT__ 2324 #define __FUNCT__ "DMHasJacobian" 2325 /*@ 2326 DMHasJacobian - does the DM object have a matrix function 2327 2328 Not Collective 2329 2330 Input Parameter: 2331 . dm - the DM object to destroy 2332 2333 Output Parameter: 2334 . 
flg - PETSC_TRUE if function exists 2335 2336 Level: developer 2337 2338 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2339 2340 @*/ 2341 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2342 { 2343 PetscFunctionBegin; 2344 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2345 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2346 PetscFunctionReturn(0); 2347 } 2348 2349 #undef __FUNCT__ 2350 #define __FUNCT__ "DMHasColoring" 2351 /*@ 2352 DMHasColoring - does the DM object have a method of providing a coloring? 2353 2354 Not Collective 2355 2356 Input Parameter: 2357 . dm - the DM object 2358 2359 Output Parameter: 2360 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2361 2362 Level: developer 2363 2364 .seealso DMHasFunction(), DMCreateColoring() 2365 2366 @*/ 2367 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2368 { 2369 PetscFunctionBegin; 2370 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2371 PetscFunctionReturn(0); 2372 } 2373 2374 #undef __FUNCT__ 2375 #define __FUNCT__ "DMSetVec" 2376 /*@C 2377 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2378 2379 Collective on DM 2380 2381 Input Parameter: 2382 + dm - the DM object 2383 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2384 2385 Level: developer 2386 2387 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2388 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2389 2390 @*/ 2391 PetscErrorCode DMSetVec(DM dm,Vec x) 2392 { 2393 PetscErrorCode ierr; 2394 PetscFunctionBegin; 2395 if (x) { 2396 if (!dm->x) { 2397 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2398 } 2399 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2400 } 2401 else if (dm->x) { 2402 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2403 } 2404 PetscFunctionReturn(0); 2405 } 2406 2407 2408 #undef __FUNCT__ 2409 #define __FUNCT__ "DMComputeFunction" 2410 /*@ 2411 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2412 2413 Collective on DM 2414 2415 Input Parameter: 2416 + dm - the DM object to destroy 2417 . x - the location where the function is evaluationed, may be ignored for linear problems 2418 - b - the vector to hold the right hand side entries 2419 2420 Level: developer 2421 2422 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2423 DMSetJacobian() 2424 2425 @*/ 2426 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2427 { 2428 PetscErrorCode ierr; 2429 PetscFunctionBegin; 2430 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2431 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2432 PetscStackPush("DM user function"); 2433 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2434 PetscStackPop; 2435 PetscFunctionReturn(0); 2436 } 2437 2438 2439 2440 #undef __FUNCT__ 2441 #define __FUNCT__ "DMComputeJacobian" 2442 /*@ 2443 DMComputeJacobian - compute the matrix entries for the solver 2444 2445 Collective on DM 2446 2447 Input Parameter: 2448 + dm - the DM object 2449 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM 2450 . A - matrix that defines the operator for the linear solve 2451 - B - the matrix used to construct the preconditioner 2452 2453 Level: developer 2454 2455 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2456 DMSetFunction() 2457 2458 @*/ 2459 PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 2460 { 2461 PetscErrorCode ierr; 2462 2463 PetscFunctionBegin; 2464 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2465 if (!dm->ops->jacobian) { 2466 ISColoring coloring; 2467 MatFDColoring fd; 2468 MatType mtype; 2469 2470 ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr); 2471 ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr); 2472 ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr); 2473 ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr); 2474 ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr); 2475 ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr); 2476 ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr); 2477 2478 dm->fd = fd; 2479 dm->ops->jacobian = DMComputeJacobianDefault; 2480 2481 /* don't know why this is needed */ 2482 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 2483 } 2484 if (!x) x = dm->x; 2485 ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr); 2486 2487 /* if matrix depends on x; i.e. 
nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */ 2488 if (x) { 2489 if (!dm->x) { 2490 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2491 } 2492 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2493 } 2494 if (A != B) { 2495 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2496 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2497 } 2498 PetscFunctionReturn(0); 2499 } 2500 2501 2502 PetscFList DMList = PETSC_NULL; 2503 PetscBool DMRegisterAllCalled = PETSC_FALSE; 2504 2505 #undef __FUNCT__ 2506 #define __FUNCT__ "DMSetType" 2507 /*@C 2508 DMSetType - Builds a DM, for a particular DM implementation. 2509 2510 Collective on DM 2511 2512 Input Parameters: 2513 + dm - The DM object 2514 - method - The name of the DM type 2515 2516 Options Database Key: 2517 . -dm_type <type> - Sets the DM type; use -help for a list of available types 2518 2519 Notes: 2520 See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D). 
2521 2522 Level: intermediate 2523 2524 .keywords: DM, set, type 2525 .seealso: DMGetType(), DMCreate() 2526 @*/ 2527 PetscErrorCode DMSetType(DM dm, DMType method) 2528 { 2529 PetscErrorCode (*r)(DM); 2530 PetscBool match; 2531 PetscErrorCode ierr; 2532 2533 PetscFunctionBegin; 2534 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2535 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2536 if (match) PetscFunctionReturn(0); 2537 2538 if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);} 2539 ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr); 2540 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2541 2542 if (dm->ops->destroy) { 2543 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2544 dm->ops->destroy = PETSC_NULL; 2545 } 2546 ierr = (*r)(dm);CHKERRQ(ierr); 2547 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2548 PetscFunctionReturn(0); 2549 } 2550 2551 #undef __FUNCT__ 2552 #define __FUNCT__ "DMGetType" 2553 /*@C 2554 DMGetType - Gets the DM type name (as a string) from the DM. 2555 2556 Not Collective 2557 2558 Input Parameter: 2559 . dm - The DM 2560 2561 Output Parameter: 2562 . type - The DM type name 2563 2564 Level: intermediate 2565 2566 .keywords: DM, get, type, name 2567 .seealso: DMSetType(), DMCreate() 2568 @*/ 2569 PetscErrorCode DMGetType(DM dm, DMType *type) 2570 { 2571 PetscErrorCode ierr; 2572 2573 PetscFunctionBegin; 2574 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2575 PetscValidCharPointer(type,2); 2576 if (!DMRegisterAllCalled) { 2577 ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr); 2578 } 2579 *type = ((PetscObject)dm)->type_name; 2580 PetscFunctionReturn(0); 2581 } 2582 2583 #undef __FUNCT__ 2584 #define __FUNCT__ "DMConvert" 2585 /*@C 2586 DMConvert - Converts a DM to another DM, either of the same or different type. 
2587 2588 Collective on DM 2589 2590 Input Parameters: 2591 + dm - the DM 2592 - newtype - new DM type (use "same" for the same type) 2593 2594 Output Parameter: 2595 . M - pointer to new DM 2596 2597 Notes: 2598 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2599 the MPI communicator of the generated DM is always the same as the communicator 2600 of the input DM. 2601 2602 Level: intermediate 2603 2604 .seealso: DMCreate() 2605 @*/ 2606 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2607 { 2608 DM B; 2609 char convname[256]; 2610 PetscBool sametype, issame; 2611 PetscErrorCode ierr; 2612 2613 PetscFunctionBegin; 2614 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2615 PetscValidType(dm,1); 2616 PetscValidPointer(M,3); 2617 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2618 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2619 { 2620 PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL; 2621 2622 /* 2623 Order of precedence: 2624 1) See if a specialized converter is known to the current DM. 2625 2) See if a specialized converter is known to the desired DM class. 2626 3) See if a good general converter is registered for the desired class 2627 4) See if a good general converter is known for the current matrix. 2628 5) Use a really basic converter. 2629 */ 2630 2631 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2632 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2633 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2634 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2635 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2636 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2637 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2638 if (conv) goto foundconv; 2639 2640 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2641 ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr); 2642 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2643 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2644 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2645 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2646 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2647 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2648 ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2649 if (conv) { 2650 ierr = DMDestroy(&B);CHKERRQ(ierr); 2651 goto foundconv; 2652 } 2653 2654 #if 0 2655 /* 3) See if a good general converter is registered for the desired class */ 2656 conv = B->ops->convertfrom; 2657 ierr = DMDestroy(&B);CHKERRQ(ierr); 2658 if (conv) goto foundconv; 2659 2660 /* 4) See if a good general converter is known for the current matrix */ 2661 if (dm->ops->convert) { 2662 conv = dm->ops->convert; 2663 } 2664 if (conv) goto foundconv; 2665 #endif 2666 2667 /* 5) Use a really basic converter. 
*/ 2668 SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2669 2670 foundconv: 2671 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2672 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2673 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2674 } 2675 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2676 PetscFunctionReturn(0); 2677 } 2678 2679 /*--------------------------------------------------------------------------------------------------------------------*/ 2680 2681 #undef __FUNCT__ 2682 #define __FUNCT__ "DMRegister" 2683 /*@C 2684 DMRegister - See DMRegisterDynamic() 2685 2686 Level: advanced 2687 @*/ 2688 PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM)) 2689 { 2690 char fullname[PETSC_MAX_PATH_LEN]; 2691 PetscErrorCode ierr; 2692 2693 PetscFunctionBegin; 2694 ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr); 2695 ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr); 2696 ierr = PetscStrcat(fullname, name);CHKERRQ(ierr); 2697 ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr); 2698 PetscFunctionReturn(0); 2699 } 2700 2701 2702 /*--------------------------------------------------------------------------------------------------------------------*/ 2703 #undef __FUNCT__ 2704 #define __FUNCT__ "DMRegisterDestroy" 2705 /*@C 2706 DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic(). 
2707 2708 Not Collective 2709 2710 Level: advanced 2711 2712 .keywords: DM, register, destroy 2713 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic() 2714 @*/ 2715 PetscErrorCode DMRegisterDestroy(void) 2716 { 2717 PetscErrorCode ierr; 2718 2719 PetscFunctionBegin; 2720 ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr); 2721 DMRegisterAllCalled = PETSC_FALSE; 2722 PetscFunctionReturn(0); 2723 } 2724 2725 #if defined(PETSC_HAVE_MATLAB_ENGINE) 2726 #include <mex.h> 2727 2728 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext; 2729 2730 #undef __FUNCT__ 2731 #define __FUNCT__ "DMComputeFunction_Matlab" 2732 /* 2733 DMComputeFunction_Matlab - Calls the function that has been set with 2734 DMSetFunctionMatlab(). 2735 2736 For linear problems x is null 2737 2738 .seealso: DMSetFunction(), DMGetFunction() 2739 */ 2740 PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y) 2741 { 2742 PetscErrorCode ierr; 2743 DMMatlabContext *sctx; 2744 int nlhs = 1,nrhs = 4; 2745 mxArray *plhs[1],*prhs[4]; 2746 long long int lx = 0,ly = 0,ls = 0; 2747 2748 PetscFunctionBegin; 2749 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2750 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 2751 PetscCheckSameComm(dm,1,y,3); 2752 2753 /* call Matlab function in ctx with arguments x and y */ 2754 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2755 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2756 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2757 ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr); 2758 prhs[0] = mxCreateDoubleScalar((double)ls); 2759 prhs[1] = mxCreateDoubleScalar((double)lx); 2760 prhs[2] = mxCreateDoubleScalar((double)ly); 2761 prhs[3] = mxCreateString(sctx->funcname); 2762 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr); 2763 ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr); 2764 mxDestroyArray(prhs[0]); 2765 mxDestroyArray(prhs[1]); 2766 mxDestroyArray(prhs[2]); 2767 mxDestroyArray(prhs[3]); 
2768 mxDestroyArray(plhs[0]); 2769 PetscFunctionReturn(0); 2770 } 2771 2772 2773 #undef __FUNCT__ 2774 #define __FUNCT__ "DMSetFunctionMatlab" 2775 /* 2776 DMSetFunctionMatlab - Sets the function evaluation routine 2777 2778 */ 2779 PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func) 2780 { 2781 PetscErrorCode ierr; 2782 DMMatlabContext *sctx; 2783 2784 PetscFunctionBegin; 2785 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2786 /* currently sctx is memory bleed */ 2787 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2788 if (!sctx) { 2789 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2790 } 2791 ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr); 2792 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2793 ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr); 2794 PetscFunctionReturn(0); 2795 } 2796 2797 #undef __FUNCT__ 2798 #define __FUNCT__ "DMComputeJacobian_Matlab" 2799 /* 2800 DMComputeJacobian_Matlab - Calls the function that has been set with 2801 DMSetJacobianMatlab(). 
2802 2803 For linear problems x is null 2804 2805 .seealso: DMSetFunction(), DMGetFunction() 2806 */ 2807 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2808 { 2809 PetscErrorCode ierr; 2810 DMMatlabContext *sctx; 2811 int nlhs = 2,nrhs = 5; 2812 mxArray *plhs[2],*prhs[5]; 2813 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2814 2815 PetscFunctionBegin; 2816 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2817 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2818 2819 /* call MATLAB function in ctx with arguments x, A, and B */ 2820 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2821 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2822 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2823 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2824 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2825 prhs[0] = mxCreateDoubleScalar((double)ls); 2826 prhs[1] = mxCreateDoubleScalar((double)lx); 2827 prhs[2] = mxCreateDoubleScalar((double)lA); 2828 prhs[3] = mxCreateDoubleScalar((double)lB); 2829 prhs[4] = mxCreateString(sctx->jacname); 2830 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2831 *str = (MatStructure) mxGetScalar(plhs[0]); 2832 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2833 mxDestroyArray(prhs[0]); 2834 mxDestroyArray(prhs[1]); 2835 mxDestroyArray(prhs[2]); 2836 mxDestroyArray(prhs[3]); 2837 mxDestroyArray(prhs[4]); 2838 mxDestroyArray(plhs[0]); 2839 mxDestroyArray(plhs[1]); 2840 PetscFunctionReturn(0); 2841 } 2842 2843 2844 #undef __FUNCT__ 2845 #define __FUNCT__ "DMSetJacobianMatlab" 2846 /* 2847 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2848 2849 */ 2850 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2851 { 2852 PetscErrorCode ierr; 2853 DMMatlabContext *sctx; 2854 2855 PetscFunctionBegin; 2856 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2857 /* currently sctx is memory bleed */ 2858 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2859 if (!sctx) { 2860 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2861 } 2862 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2863 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2864 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2865 PetscFunctionReturn(0); 2866 } 2867 #endif 2868 2869 #undef __FUNCT__ 2870 #define __FUNCT__ "DMLoad" 2871 /*@C 2872 DMLoad - Loads a DM that has been stored in binary or HDF5 format 2873 with DMView(). 2874 2875 Collective on PetscViewer 2876 2877 Input Parameters: 2878 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2879 some related function before a call to DMLoad(). 2880 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2881 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2882 2883 Level: intermediate 2884 2885 Notes: 2886 Defaults to the DM DA. 2887 2888 Notes for advanced users: 2889 Most users should not need to know the details of the binary storage 2890 format, since DMLoad() and DMView() completely hide these details. 2891 But for anyone who's interested, the standard binary matrix storage 2892 format is 2893 .vb 2894 has not yet been determined 2895 .ve 2896 2897 In addition, PETSc automatically does the byte swapping for 2898 machines that store the bytes reversed, e.g. DEC alpha, freebsd, 2899 linux, Windows and the paragon; thus if you write your own binary 2900 read/write routines you have to swap the bytes; see PetscBinaryRead() 2901 and PetscBinaryWrite() to see how this may be done. 
2902 2903 Concepts: vector^loading from file 2904 2905 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2906 @*/ 2907 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2908 { 2909 PetscErrorCode ierr; 2910 2911 PetscFunctionBegin; 2912 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2913 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2914 2915 if (!((PetscObject)newdm)->type_name) { 2916 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2917 } 2918 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2919 PetscFunctionReturn(0); 2920 } 2921 2922 /******************************** FEM Support **********************************/ 2923 2924 #undef __FUNCT__ 2925 #define __FUNCT__ "DMPrintCellVector" 2926 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2927 PetscInt f; 2928 PetscErrorCode ierr; 2929 2930 PetscFunctionBegin; 2931 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2932 for (f = 0; f < len; ++f) { 2933 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2934 } 2935 PetscFunctionReturn(0); 2936 } 2937 2938 #undef __FUNCT__ 2939 #define __FUNCT__ "DMPrintCellMatrix" 2940 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2941 PetscInt f, g; 2942 PetscErrorCode ierr; 2943 2944 PetscFunctionBegin; 2945 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2946 for (f = 0; f < rows; ++f) { 2947 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2948 for (g = 0; g < cols; ++g) { 2949 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2950 } 2951 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2952 } 2953 PetscFunctionReturn(0); 2954 } 2955 2956 #undef __FUNCT__ 2957 #define __FUNCT__ "DMGetLocalFunction" 2958 /*@C 2959 DMGetLocalFunction - Get the local residual 
function from this DM 2960 2961 Not collective 2962 2963 Input Parameter: 2964 . dm - The DM 2965 2966 Output Parameter: 2967 . lf - The local residual function 2968 2969 Calling sequence of lf: 2970 $ lf (SNES snes, Vec x, Vec f, void *ctx); 2971 2972 + snes - the SNES context 2973 . x - local vector with the state at which to evaluate residual 2974 . f - local vector to put residual in 2975 - ctx - optional user-defined function context 2976 2977 Level: intermediate 2978 2979 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 2980 @*/ 2981 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 2982 { 2983 PetscFunctionBegin; 2984 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 2985 if (lf) *lf = dm->lf; 2986 PetscFunctionReturn(0); 2987 } 2988 2989 #undef __FUNCT__ 2990 #define __FUNCT__ "DMSetLocalFunction" 2991 /*@C 2992 DMSetLocalFunction - Set the local residual function from this DM 2993 2994 Not collective 2995 2996 Input Parameters: 2997 + dm - The DM 2998 - lf - The local residual function 2999 3000 Calling sequence of lf: 3001 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3002 3003 + snes - the SNES context 3004 . x - local vector with the state at which to evaluate residual 3005 . f - local vector to put residual in 3006 - ctx - optional user-defined function context 3007 3008 Level: intermediate 3009 3010 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3011 @*/ 3012 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 3013 { 3014 PetscFunctionBegin; 3015 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3016 dm->lf = lf; 3017 PetscFunctionReturn(0); 3018 } 3019 3020 #undef __FUNCT__ 3021 #define __FUNCT__ "DMGetLocalJacobian" 3022 /*@C 3023 DMGetLocalJacobian - Get the local Jacobian function from this DM 3024 3025 Not collective 3026 3027 Input Parameter: 3028 . dm - The DM 3029 3030 Output Parameter: 3031 . 
lj - The local Jacobian function 3032 3033 Calling sequence of lj: 3034 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3035 3036 + snes - the SNES context 3037 . x - local vector with the state at which to evaluate residual 3038 . J - matrix to put Jacobian in 3039 . M - matrix to use for defining Jacobian preconditioner 3040 - ctx - optional user-defined function context 3041 3042 Level: intermediate 3043 3044 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3045 @*/ 3046 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 3047 { 3048 PetscFunctionBegin; 3049 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3050 if (lj) *lj = dm->lj; 3051 PetscFunctionReturn(0); 3052 } 3053 3054 #undef __FUNCT__ 3055 #define __FUNCT__ "DMSetLocalJacobian" 3056 /*@C 3057 DMSetLocalJacobian - Set the local Jacobian function from this DM 3058 3059 Not collective 3060 3061 Input Parameters: 3062 + dm - The DM 3063 - lj - The local Jacobian function 3064 3065 Calling sequence of lj: 3066 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3067 3068 + snes - the SNES context 3069 . x - local vector with the state at which to evaluate residual 3070 . J - matrix to put Jacobian in 3071 . M - matrix to use for defining Jacobian preconditioner 3072 - ctx - optional user-defined function context 3073 3074 Level: intermediate 3075 3076 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3077 @*/ 3078 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 3079 { 3080 PetscFunctionBegin; 3081 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3082 dm->lj = lj; 3083 PetscFunctionReturn(0); 3084 } 3085 3086 #undef __FUNCT__ 3087 #define __FUNCT__ "DMGetDefaultSection" 3088 /*@ 3089 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 3090 3091 Input Parameter: 3092 . dm - The DM 3093 3094 Output Parameter: 3095 . 
section - The PetscSection 3096 3097 Level: intermediate 3098 3099 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3100 3101 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3102 @*/ 3103 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 3104 PetscFunctionBegin; 3105 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3106 PetscValidPointer(section, 2); 3107 *section = dm->defaultSection; 3108 PetscFunctionReturn(0); 3109 } 3110 3111 #undef __FUNCT__ 3112 #define __FUNCT__ "DMSetDefaultSection" 3113 /*@ 3114 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 3115 3116 Input Parameters: 3117 + dm - The DM 3118 - section - The PetscSection 3119 3120 Level: intermediate 3121 3122 Note: Any existing Section will be destroyed 3123 3124 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3125 @*/ 3126 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3127 PetscInt numFields; 3128 PetscInt f; 3129 PetscErrorCode ierr; 3130 3131 PetscFunctionBegin; 3132 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3133 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3134 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3135 dm->defaultSection = section; 3136 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3137 if (numFields) { 3138 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3139 for (f = 0; f < numFields; ++f) { 3140 const char *name; 3141 3142 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3143 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3144 } 3145 } 3146 PetscFunctionReturn(0); 3147 } 3148 3149 #undef __FUNCT__ 3150 #define __FUNCT__ "DMGetDefaultGlobalSection" 3151 /*@ 3152 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3153 3154 Input Parameter: 3155 . dm - The DM 3156 3157 Output Parameter: 3158 . 
section - The PetscSection 3159 3160 Level: intermediate 3161 3162 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3163 3164 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3165 @*/ 3166 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3167 PetscErrorCode ierr; 3168 3169 PetscFunctionBegin; 3170 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3171 PetscValidPointer(section, 2); 3172 if (!dm->defaultGlobalSection) { 3173 if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 3174 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 3175 } 3176 *section = dm->defaultGlobalSection; 3177 PetscFunctionReturn(0); 3178 } 3179 3180 #undef __FUNCT__ 3181 #define __FUNCT__ "DMSetDefaultGlobalSection" 3182 /*@ 3183 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 3184 3185 Input Parameters: 3186 + dm - The DM 3187 - section - The PetscSection 3188 3189 Level: intermediate 3190 3191 Note: Any existing Section will be destroyed 3192 3193 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 3194 @*/ 3195 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) { 3196 PetscErrorCode ierr; 3197 3198 PetscFunctionBegin; 3199 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3200 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3201 dm->defaultGlobalSection = section; 3202 PetscFunctionReturn(0); 3203 } 3204 3205 #undef __FUNCT__ 3206 #define __FUNCT__ "DMGetDefaultSF" 3207 /*@ 3208 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3209 it is created from the default PetscSection layouts in the DM. 3210 3211 Input Parameter: 3212 . dm - The DM 3213 3214 Output Parameter: 3215 . 
sf - The PetscSF 3216 3217 Level: intermediate 3218 3219 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3220 3221 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3222 @*/ 3223 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3224 PetscInt nroots; 3225 PetscErrorCode ierr; 3226 3227 PetscFunctionBegin; 3228 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3229 PetscValidPointer(sf, 2); 3230 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3231 if (nroots < 0) { 3232 PetscSection section, gSection; 3233 3234 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3235 if (section) { 3236 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3237 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3238 } else { 3239 *sf = PETSC_NULL; 3240 PetscFunctionReturn(0); 3241 } 3242 } 3243 *sf = dm->defaultSF; 3244 PetscFunctionReturn(0); 3245 } 3246 3247 #undef __FUNCT__ 3248 #define __FUNCT__ "DMSetDefaultSF" 3249 /*@ 3250 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3251 3252 Input Parameters: 3253 + dm - The DM 3254 - sf - The PetscSF 3255 3256 Level: intermediate 3257 3258 Note: Any previous SF is destroyed 3259 3260 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3261 @*/ 3262 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3263 PetscErrorCode ierr; 3264 3265 PetscFunctionBegin; 3266 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3267 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3268 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3269 dm->defaultSF = sf; 3270 PetscFunctionReturn(0); 3271 } 3272 3273 #undef __FUNCT__ 3274 #define __FUNCT__ "DMCreateDefaultSF" 3275 /*@C 3276 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3277 describing the data layout. 3278 3279 Input Parameters: 3280 + dm - The DM 3281 . 
localSection - PetscSection describing the local data layout 3282 - globalSection - PetscSection describing the global data layout 3283 3284 Level: intermediate 3285 3286 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3287 @*/ 3288 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3289 { 3290 MPI_Comm comm = ((PetscObject) dm)->comm; 3291 PetscLayout layout; 3292 const PetscInt *ranges; 3293 PetscInt *local; 3294 PetscSFNode *remote; 3295 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3296 PetscMPIInt size, rank; 3297 PetscErrorCode ierr; 3298 3299 PetscFunctionBegin; 3300 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3301 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3302 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3303 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3304 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3305 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3306 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3307 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3308 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3309 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3310 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3311 PetscInt gdof, gcdof; 3312 3313 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3314 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3315 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3316 } 3317 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3318 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3319 for (p = pStart, l = 0; p < pEnd; ++p) { 3320 const PetscInt *cind; 3321 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3322 3323 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3324 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3325 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3326 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3327 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3328 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3329 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3330 if (!gdof) continue; /* Censored point */ 3331 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3332 if (gsize != dof-cdof) { 3333 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", size, p, dof-cdof, dof); 3334 cdof = 0; /* Ignore constraints */ 3335 } 3336 for (d = 0, c = 0; d < dof; ++d) { 3337 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3338 local[l+d-c] = off+d; 3339 } 3340 if (gdof < 0) { 3341 for(d = 0; d < gsize; ++d, ++l) { 3342 PetscInt offset = -(goff+1) + d, r; 3343 3344 for (r = 0; r < size; ++r) { 3345 if ((offset >= ranges[r]) && (offset < ranges[r+1])) break; 3346 } 3347 remote[l].rank = r; 3348 remote[l].index = offset - ranges[r]; 3349 } 3350 } else { 3351 for(d = 0; d < gsize; ++d, ++l) { 3352 remote[l].rank = rank; 3353 remote[l].index = goff+d - ranges[rank]; 3354 } 3355 } 3356 } 3357 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3358 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3359 ierr = PetscSFSetGraph(dm->defaultSF, nroots, 
nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3360 PetscFunctionReturn(0); 3361 } 3362 3363 #undef __FUNCT__ 3364 #define __FUNCT__ "DMGetPointSF" 3365 /*@ 3366 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3367 3368 Input Parameter: 3369 . dm - The DM 3370 3371 Output Parameter: 3372 . sf - The PetscSF 3373 3374 Level: intermediate 3375 3376 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3377 3378 .seealso: DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3379 @*/ 3380 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3381 PetscFunctionBegin; 3382 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3383 PetscValidPointer(sf, 2); 3384 *sf = dm->sf; 3385 PetscFunctionReturn(0); 3386 } 3387 3388 #undef __FUNCT__ 3389 #define __FUNCT__ "DMGetNumFields" 3390 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3391 { 3392 PetscFunctionBegin; 3393 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3394 PetscValidPointer(numFields, 2); 3395 *numFields = dm->numFields; 3396 PetscFunctionReturn(0); 3397 } 3398 3399 #undef __FUNCT__ 3400 #define __FUNCT__ "DMSetNumFields" 3401 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3402 { 3403 PetscInt f; 3404 PetscErrorCode ierr; 3405 3406 PetscFunctionBegin; 3407 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3408 for (f = 0; f < dm->numFields; ++f) { 3409 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3410 } 3411 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3412 dm->numFields = numFields; 3413 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3414 for (f = 0; f < dm->numFields; ++f) { 3415 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3416 } 3417 PetscFunctionReturn(0); 3418 } 3419 3420 #undef __FUNCT__ 3421 #define __FUNCT__ "DMGetField" 3422 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3423 { 
3424 PetscFunctionBegin; 3425 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3426 PetscValidPointer(field, 2); 3427 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3428 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3429 *field = dm->fields[f]; 3430 PetscFunctionReturn(0); 3431 } 3432 3433 #undef __FUNCT__ 3434 #define __FUNCT__ "DMSetCoordinates" 3435 /*@ 3436 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3437 3438 Collective on DM 3439 3440 Input Parameters: 3441 + dm - the DM 3442 - c - coordinate vector 3443 3444 Note: 3445 The coordinates do include those for ghost points, which are in the local vector 3446 3447 Level: intermediate 3448 3449 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3450 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3451 @*/ 3452 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3453 { 3454 PetscErrorCode ierr; 3455 3456 PetscFunctionBegin; 3457 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3458 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3459 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3460 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3461 dm->coordinates = c; 3462 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3463 PetscFunctionReturn(0); 3464 } 3465 3466 #undef __FUNCT__ 3467 #define __FUNCT__ "DMSetCoordinatesLocal" 3468 /*@ 3469 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3470 3471 Collective on DM 3472 3473 Input Parameters: 3474 + dm - the DM 3475 - c - coordinate vector 3476 3477 Note: 3478 The coordinates of ghost points can be set using DMSetCoordinates() 3479 followed by DMGetCoordinatesLocal(). 
This is intended to enable the 3480 setting of ghost coordinates outside of the domain. 3481 3482 Level: intermediate 3483 3484 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3485 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3486 @*/ 3487 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3488 { 3489 PetscErrorCode ierr; 3490 3491 PetscFunctionBegin; 3492 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3493 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3494 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3495 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3496 dm->coordinatesLocal = c; 3497 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3498 PetscFunctionReturn(0); 3499 } 3500 3501 #undef __FUNCT__ 3502 #define __FUNCT__ "DMGetCoordinates" 3503 /*@ 3504 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3505 3506 Not Collective 3507 3508 Input Parameter: 3509 . dm - the DM 3510 3511 Output Parameter: 3512 . c - global coordinate vector 3513 3514 Note: 3515 This is a borrowed reference, so the user should NOT destroy this vector 3516 3517 Each process has only the local coordinates (does NOT have the ghost coordinates). 3518 3519 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3520 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3521 3522 Level: intermediate 3523 3524 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3525 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3526 @*/ 3527 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3528 { 3529 PetscErrorCode ierr; 3530 3531 PetscFunctionBegin; 3532 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3533 PetscValidPointer(c,2); 3534 if (!dm->coordinates && dm->coordinatesLocal) { 3535 DM cdm; 3536 3537 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3538 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3539 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3540 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3541 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3542 } 3543 *c = dm->coordinates; 3544 PetscFunctionReturn(0); 3545 } 3546 3547 #undef __FUNCT__ 3548 #define __FUNCT__ "DMGetCoordinatesLocal" 3549 /*@ 3550 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3551 3552 Collective on DM 3553 3554 Input Parameter: 3555 . dm - the DM 3556 3557 Output Parameter: 3558 . c - coordinate vector 3559 3560 Note: 3561 This is a borrowed reference, so the user should NOT destroy this vector 3562 3563 Each process has the local and ghost coordinates 3564 3565 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3566 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3567 3568 Level: intermediate 3569 3570 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3571 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3572 @*/ 3573 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3574 { 3575 PetscErrorCode ierr; 3576 3577 PetscFunctionBegin; 3578 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3579 PetscValidPointer(c,2); 3580 if (!dm->coordinatesLocal && dm->coordinates) { 3581 DM cdm; 3582 3583 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3584 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3585 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3586 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3587 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3588 } 3589 *c = dm->coordinatesLocal; 3590 PetscFunctionReturn(0); 3591 } 3592 3593 #undef __FUNCT__ 3594 #define __FUNCT__ "DMGetCoordinateDM" 3595 /*@ 3596 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3597 3598 Collective on DM 3599 3600 Input Parameter: 3601 . dm - the DM 3602 3603 Output Parameter: 3604 . 
cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* the coordinate DM is created lazily by the implementation on first request */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}