#include <petscsnes.h>
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the DM.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* Without dynamic libraries the packages this class builds on must be registered explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* All implementation function pointers start out empty; DMSetType() fills them in later */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);

  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  v->lf           = PETSC_NULL;
  v->lj           = PETSC_NULL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    /* NOTE(review): the hard-coded 10 must match the declared length of
       nullspaceConstructors[] in dmimpl.h and the bound checked in
       DMSetNullSpaceConstructor() -- confirm if that array ever grows */
    PetscInt i;
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - the DM context
.  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously set type name before storing a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (PETSC_NULL if no DM was set on the vector)

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored as a composed object under a private key; query does not add a reference */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
  VecSetDM - Sets the DM defining the data layout of the vector

  Not collective

  Input Parameters:
+ v - The Vec
- dm - The DM

  Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Composing takes a reference on the DM; the Vec keeps the DM alive (see circular-reference handling in DMDestroy()) */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
.  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously set type name before storing a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (PETSC_NULL if no DM was set on the matrix)

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored as a composed object under a private key; query does not add a reference */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Composing takes a reference on the DM; the Mat keeps the DM alive */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
dm - the DM object to destroy 274 275 Level: developer 276 277 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 278 279 @*/ 280 PetscErrorCode DMDestroy(DM *dm) 281 { 282 PetscInt i, cnt = 0, f; 283 DMNamedVecLink nlink,nnext; 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 if (!*dm) PetscFunctionReturn(0); 288 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 289 290 /* count all the circular references of DM and its contained Vecs */ 291 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 292 if ((*dm)->localin[i]) {cnt++;} 293 if ((*dm)->globalin[i]) {cnt++;} 294 } 295 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 296 if ((*dm)->x) { 297 DM obj; 298 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 299 if (obj == *dm) cnt++; 300 } 301 302 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 303 /* 304 Need this test because the dm references the vectors that 305 reference the dm, so destroying the dm calls destroy on the 306 vectors that cause another destroy on the dm 307 */ 308 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 309 ((PetscObject) (*dm))->refct = 0; 310 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 311 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 312 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 313 } 314 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 315 nnext = nlink->next; 316 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 317 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 318 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 319 ierr = PetscFree(nlink);CHKERRQ(ierr); 320 } 321 (*dm)->namedglobal = PETSC_NULL; 322 323 /* Destroy the list of hooks */ 324 { 325 DMCoarsenHookLink link,next; 326 for 
(link=(*dm)->coarsenhook; link; link=next) { 327 next = link->next; 328 ierr = PetscFree(link);CHKERRQ(ierr); 329 } 330 (*dm)->coarsenhook = PETSC_NULL; 331 } 332 { 333 DMRefineHookLink link,next; 334 for (link=(*dm)->refinehook; link; link=next) { 335 next = link->next; 336 ierr = PetscFree(link);CHKERRQ(ierr); 337 } 338 (*dm)->refinehook = PETSC_NULL; 339 } 340 /* Destroy the work arrays */ 341 { 342 DMWorkLink link,next; 343 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 344 for (link=(*dm)->workin; link; link=next) { 345 next = link->next; 346 ierr = PetscFree(link->mem);CHKERRQ(ierr); 347 ierr = PetscFree(link);CHKERRQ(ierr); 348 } 349 (*dm)->workin = PETSC_NULL; 350 } 351 352 if ((*dm)->ctx && (*dm)->ctxdestroy) { 353 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 354 } 355 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 356 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 357 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 358 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 359 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 360 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 361 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 362 363 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 364 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 365 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 366 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 367 368 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 369 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 370 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 371 372 for (f = 0; f < (*dm)->numFields; ++f) { 373 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 374 } 375 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 376 /* if memory was published with AMS then destroy it */ 377 ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 378 379 
ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 380 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 381 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 382 PetscFunctionReturn(0); 383 } 384 385 #undef __FUNCT__ 386 #define __FUNCT__ "DMSetUp" 387 /*@ 388 DMSetUp - sets up the data structures inside a DM object 389 390 Collective on DM 391 392 Input Parameter: 393 . dm - the DM object to setup 394 395 Level: developer 396 397 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 398 399 @*/ 400 PetscErrorCode DMSetUp(DM dm) 401 { 402 PetscErrorCode ierr; 403 404 PetscFunctionBegin; 405 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 406 if (dm->setupcalled) PetscFunctionReturn(0); 407 if (dm->ops->setup) { 408 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 409 } 410 dm->setupcalled = PETSC_TRUE; 411 PetscFunctionReturn(0); 412 } 413 414 #undef __FUNCT__ 415 #define __FUNCT__ "DMSetFromOptions" 416 /*@ 417 DMSetFromOptions - sets parameters in a DM from the options database 418 419 Collective on DM 420 421 Input Parameter: 422 . dm - the DM object to set options for 423 424 Options Database: 425 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 426 . -dm_vec_type <type> type of vector to create inside DM 427 . 
-dm_mat_type <type> type of matrix to create inside DM 428 - -dm_coloring_type <global or ghosted> 429 430 Level: developer 431 432 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 433 434 @*/ 435 PetscErrorCode DMSetFromOptions(DM dm) 436 { 437 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 438 PetscErrorCode ierr; 439 char typeName[256] = MATAIJ; 440 441 PetscFunctionBegin; 442 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 443 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 444 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 445 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 446 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 447 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 448 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 449 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 450 if (flg) { 451 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 452 } 453 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 454 if (flg) { 455 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 456 } 457 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 458 if 
(dm->ops->setfromoptions) { 459 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 460 } 461 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 462 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 463 ierr = PetscOptionsEnd();CHKERRQ(ierr); 464 if (flg1) { 465 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 466 } 467 if (flg2) { 468 PetscViewer viewer; 469 470 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 471 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 472 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 473 ierr = DMView(dm, viewer);CHKERRQ(ierr); 474 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 475 } 476 if (flg3) { 477 PetscViewer viewer; 478 479 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 480 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 481 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 482 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 483 ierr = DMView(dm, viewer);CHKERRQ(ierr); 484 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 485 } 486 if (flg4) { 487 PetscViewer viewer; 488 489 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 490 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 491 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 492 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 493 ierr = DMView(dm, viewer);CHKERRQ(ierr); 494 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 495 } 496 PetscFunctionReturn(0); 497 } 498 499 #undef __FUNCT__ 500 #define __FUNCT__ "DMView" 501 /*@C 502 DMView - Views a vector packer or DMDA. 
503 504 Collective on DM 505 506 Input Parameter: 507 + dm - the DM object to view 508 - v - the viewer 509 510 Level: developer 511 512 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 513 514 @*/ 515 PetscErrorCode DMView(DM dm,PetscViewer v) 516 { 517 PetscErrorCode ierr; 518 519 PetscFunctionBegin; 520 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 521 if (!v) { 522 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 523 } 524 if (dm->ops->view) { 525 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 526 } 527 PetscFunctionReturn(0); 528 } 529 530 #undef __FUNCT__ 531 #define __FUNCT__ "DMCreateGlobalVector" 532 /*@ 533 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 534 535 Collective on DM 536 537 Input Parameter: 538 . dm - the DM object 539 540 Output Parameter: 541 . vec - the global vector 542 543 Level: beginner 544 545 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 546 547 @*/ 548 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 549 { 550 PetscErrorCode ierr; 551 552 PetscFunctionBegin; 553 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 554 if (dm->defaultSection) { 555 PetscSection gSection; 556 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 557 558 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 559 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 560 for(p = pStart; p < pEnd; ++p) { 561 PetscInt dof, cdof; 562 563 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 564 ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr); 565 if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof; 566 if ((dof > 0) && (dof-cdof != blockSize)) { 567 blockSize = 1; 568 break; 569 } 570 } 571 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 572 if (localSize%blockSize) 
SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize); 573 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 574 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 575 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 576 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 577 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 578 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 579 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 580 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 581 /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 582 } else { 583 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 584 } 585 PetscFunctionReturn(0); 586 } 587 588 #undef __FUNCT__ 589 #define __FUNCT__ "DMCreateLocalVector" 590 /*@ 591 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 592 593 Not Collective 594 595 Input Parameter: 596 . dm - the DM object 597 598 Output Parameter: 599 . 
vec - the local vector 600 601 Level: beginner 602 603 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 604 605 @*/ 606 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 607 { 608 PetscErrorCode ierr; 609 610 PetscFunctionBegin; 611 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 612 if (dm->defaultSection) { 613 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 614 615 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 616 for(p = pStart; p < pEnd; ++p) { 617 PetscInt dof; 618 619 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 620 if ((blockSize < 0) && (dof > 0)) blockSize = dof; 621 if ((dof > 0) && (dof != blockSize)) { 622 blockSize = 1; 623 break; 624 } 625 } 626 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 627 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 628 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 629 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 630 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 631 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 632 } else { 633 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 634 } 635 PetscFunctionReturn(0); 636 } 637 638 #undef __FUNCT__ 639 #define __FUNCT__ "DMGetLocalToGlobalMapping" 640 /*@ 641 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 642 643 Collective on DM 644 645 Input Parameter: 646 . dm - the DM that provides the mapping 647 648 Output Parameter: 649 . ltog - the mapping 650 651 Level: intermediate 652 653 Notes: 654 This mapping can then be used by VecSetLocalToGlobalMapping() or 655 MatSetLocalToGlobalMapping(). 
656 657 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 658 @*/ 659 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 660 { 661 PetscErrorCode ierr; 662 663 PetscFunctionBegin; 664 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 665 PetscValidPointer(ltog,2); 666 if (!dm->ltogmap) { 667 PetscSection section, sectionGlobal; 668 669 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 670 if (section) { 671 PetscInt *ltog; 672 PetscInt pStart, pEnd, size, p, l; 673 674 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 675 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 676 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 677 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 678 for (p = pStart, l = 0; p < pEnd; ++p) { 679 PetscInt dof, off, c; 680 681 /* Should probably use constrained dofs */ 682 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 683 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 684 for (c = 0; c < dof; ++c, ++l) { 685 ltog[l] = off+c; 686 } 687 } 688 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 689 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 690 } else { 691 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 692 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 693 } 694 } 695 *ltog = dm->ltogmap; 696 PetscFunctionReturn(0); 697 } 698 699 #undef __FUNCT__ 700 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 701 /*@ 702 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 703 704 Collective on DM 705 706 Input Parameter: 707 . da - the distributed array that provides the mapping 708 709 Output Parameter: 710 . 
ltog - the block mapping 711 712 Level: intermediate 713 714 Notes: 715 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 716 MatSetLocalToGlobalMappingBlock(). 717 718 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 719 @*/ 720 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 721 { 722 PetscErrorCode ierr; 723 724 PetscFunctionBegin; 725 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 726 PetscValidPointer(ltog,2); 727 if (!dm->ltogmapb) { 728 PetscInt bs; 729 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 730 if (bs > 1) { 731 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 732 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 733 } else { 734 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 735 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 736 } 737 } 738 *ltog = dm->ltogmapb; 739 PetscFunctionReturn(0); 740 } 741 742 #undef __FUNCT__ 743 #define __FUNCT__ "DMGetBlockSize" 744 /*@ 745 DMGetBlockSize - Gets the inherent block size associated with a DM 746 747 Not Collective 748 749 Input Parameter: 750 . dm - the DM with block structure 751 752 Output Parameter: 753 . 
bs - the block size, 1 implies no exploitable block structure 754 755 Level: intermediate 756 757 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 758 @*/ 759 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 760 { 761 PetscFunctionBegin; 762 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 763 PetscValidPointer(bs,2); 764 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 765 *bs = dm->bs; 766 PetscFunctionReturn(0); 767 } 768 769 #undef __FUNCT__ 770 #define __FUNCT__ "DMCreateInterpolation" 771 /*@ 772 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 773 774 Collective on DM 775 776 Input Parameter: 777 + dm1 - the DM object 778 - dm2 - the second, finer DM object 779 780 Output Parameter: 781 + mat - the interpolation 782 - vec - the scaling (optional) 783 784 Level: developer 785 786 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 787 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 788 789 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 790 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
791 792 793 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 794 795 @*/ 796 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 797 { 798 PetscErrorCode ierr; 799 800 PetscFunctionBegin; 801 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 802 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 803 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 804 PetscFunctionReturn(0); 805 } 806 807 #undef __FUNCT__ 808 #define __FUNCT__ "DMCreateInjection" 809 /*@ 810 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 811 812 Collective on DM 813 814 Input Parameter: 815 + dm1 - the DM object 816 - dm2 - the second, finer DM object 817 818 Output Parameter: 819 . ctx - the injection 820 821 Level: developer 822 823 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 824 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 825 826 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 827 828 @*/ 829 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 830 { 831 PetscErrorCode ierr; 832 833 PetscFunctionBegin; 834 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 835 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 836 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 837 PetscFunctionReturn(0); 838 } 839 840 #undef __FUNCT__ 841 #define __FUNCT__ "DMCreateColoring" 842 /*@C 843 DMCreateColoring - Gets coloring for a DMDA or DMComposite 844 845 Collective on DM 846 847 Input Parameter: 848 + dm - the DM object 849 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 850 - matype - either MATAIJ or MATBAIJ 851 852 Output Parameter: 853 . 
coloring - the coloring 854 855 Level: developer 856 857 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 858 859 @*/ 860 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 861 { 862 PetscErrorCode ierr; 863 864 PetscFunctionBegin; 865 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 866 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 867 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 868 PetscFunctionReturn(0); 869 } 870 871 #undef __FUNCT__ 872 #define __FUNCT__ "DMCreateMatrix" 873 /*@C 874 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 875 876 Collective on DM 877 878 Input Parameter: 879 + dm - the DM object 880 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 881 any type which inherits from one of these (such as MATAIJ) 882 883 Output Parameter: 884 . mat - the empty Jacobian 885 886 Level: beginner 887 888 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 889 do not need to do it yourself. 890 891 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 892 the nonzero pattern call DMDASetMatPreallocateOnly() 893 894 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 895 internally by PETSc. 896 897 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 898 the indices for the global numbering for DMDAs which is complicated. 
899 900 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 901 902 @*/ 903 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 904 { 905 PetscErrorCode ierr; 906 907 PetscFunctionBegin; 908 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 909 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 910 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 911 #endif 912 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 913 PetscValidPointer(mat,3); 914 if (dm->mattype) { 915 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 916 } else { 917 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 918 } 919 PetscFunctionReturn(0); 920 } 921 922 #undef __FUNCT__ 923 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 924 /*@ 925 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 926 preallocated but the nonzero structure and zero values will not be set. 927 928 Logically Collective on DMDA 929 930 Input Parameter: 931 + dm - the DM 932 - only - PETSC_TRUE if only want preallocation 933 934 Level: developer 935 .seealso DMCreateMatrix() 936 @*/ 937 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 938 { 939 PetscFunctionBegin; 940 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 941 dm->prealloc_only = only; 942 PetscFunctionReturn(0); 943 } 944 945 #undef __FUNCT__ 946 #define __FUNCT__ "DMGetWorkArray" 947 /*@C 948 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 949 950 Not Collective 951 952 Input Parameters: 953 + dm - the DM object 954 . count - The minium size 955 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 956 957 Output Parameter: 958 . 
array - the work array 959 960 Level: developer 961 962 .seealso DMDestroy(), DMCreate() 963 @*/ 964 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 965 { 966 PetscErrorCode ierr; 967 DMWorkLink link; 968 size_t size; 969 970 PetscFunctionBegin; 971 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 972 PetscValidPointer(mem,4); 973 if (dm->workin) { 974 link = dm->workin; 975 dm->workin = dm->workin->next; 976 } else { 977 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 978 } 979 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 980 if (size*count > link->bytes) { 981 ierr = PetscFree(link->mem);CHKERRQ(ierr); 982 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 983 link->bytes = size*count; 984 } 985 link->next = dm->workout; 986 dm->workout = link; 987 *(void**)mem = link->mem; 988 PetscFunctionReturn(0); 989 } 990 991 #undef __FUNCT__ 992 #define __FUNCT__ "DMRestoreWorkArray" 993 /*@C 994 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 995 996 Not Collective 997 998 Input Parameters: 999 + dm - the DM object 1000 . count - The minium size 1001 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1002 1003 Output Parameter: 1004 . 
array - the work array 1005 1006 Level: developer 1007 1008 .seealso DMDestroy(), DMCreate() 1009 @*/ 1010 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1011 { 1012 DMWorkLink *p,link; 1013 1014 PetscFunctionBegin; 1015 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1016 PetscValidPointer(mem,4); 1017 for (p=&dm->workout; (link=*p); p=&link->next) { 1018 if (link->mem == *(void**)mem) { 1019 *p = link->next; 1020 link->next = dm->workin; 1021 dm->workin = link; 1022 *(void**)mem = PETSC_NULL; 1023 PetscFunctionReturn(0); 1024 } 1025 } 1026 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1027 PetscFunctionReturn(0); 1028 } 1029 1030 #undef __FUNCT__ 1031 #define __FUNCT__ "DMSetNullSpaceConstructor" 1032 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1033 { 1034 PetscFunctionBegin; 1035 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1036 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1037 dm->nullspaceConstructors[field] = nullsp; 1038 PetscFunctionReturn(0); 1039 } 1040 1041 #undef __FUNCT__ 1042 #define __FUNCT__ "DMCreateFieldIS" 1043 /*@C 1044 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1045 1046 Not collective 1047 1048 Input Parameter: 1049 . dm - the DM object 1050 1051 Output Parameters: 1052 + numFields - The number of fields (or PETSC_NULL if not requested) 1053 . fieldNames - The name for each field (or PETSC_NULL if not requested) 1054 - fields - The global indices for each field (or PETSC_NULL if not requested) 1055 1056 Level: intermediate 1057 1058 Notes: 1059 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1060 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1061 PetscFree(). 1062 1063 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1064 @*/ 1065 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1066 { 1067 PetscSection section, sectionGlobal; 1068 PetscErrorCode ierr; 1069 1070 PetscFunctionBegin; 1071 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1072 if (numFields) { 1073 PetscValidPointer(numFields,2); 1074 *numFields = 0; 1075 } 1076 if (fieldNames) { 1077 PetscValidPointer(fieldNames,3); 1078 *fieldNames = PETSC_NULL; 1079 } 1080 if (fields) { 1081 PetscValidPointer(fields,4); 1082 *fields = PETSC_NULL; 1083 } 1084 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1085 if (section) { 1086 PetscInt *fieldSizes, **fieldIndices; 1087 PetscInt nF, f, pStart, pEnd, p; 1088 1089 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1090 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1091 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 1092 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1093 for (f = 0; f < nF; ++f) { 1094 fieldSizes[f] = 0; 1095 } 1096 for (p = pStart; p < pEnd; ++p) { 1097 PetscInt gdof; 1098 1099 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1100 if (gdof > 0) { 1101 for (f = 0; f < nF; ++f) { 1102 PetscInt fdof, fcdof; 1103 1104 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1105 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1106 fieldSizes[f] += fdof-fcdof; 1107 } 1108 } 1109 } 1110 for (f = 0; f < nF; ++f) { 1111 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1112 fieldSizes[f] = 0; 1113 } 1114 for (p = pStart; p < pEnd; ++p) { 1115 PetscInt 
gdof, goff; 1116 1117 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1118 if (gdof > 0) { 1119 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1120 for (f = 0; f < nF; ++f) { 1121 PetscInt fdof, fcdof, fc; 1122 1123 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1124 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1125 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1126 fieldIndices[f][fieldSizes[f]] = goff++; 1127 } 1128 } 1129 } 1130 } 1131 if (numFields) {*numFields = nF;} 1132 if (fieldNames) { 1133 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 1134 for (f = 0; f < nF; ++f) { 1135 const char *fieldName; 1136 1137 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1138 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 1139 } 1140 } 1141 if (fields) { 1142 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1143 for (f = 0; f < nF; ++f) { 1144 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1145 } 1146 } 1147 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1148 } else { 1149 if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1150 } 1151 PetscFunctionReturn(0); 1152 } 1153 1154 #undef __FUNCT__ 1155 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1156 /*@C 1157 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1158 1159 Not Collective 1160 1161 Input Parameters: 1162 + dm - the DM object 1163 - name - the name of the field decomposition 1164 1165 Output Parameter: 1166 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1167 1168 Level: advanced 1169 1170 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1171 @*/ 1172 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1173 { 1174 PetscErrorCode ierr; 1175 1176 PetscFunctionBegin; 1177 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1178 PetscValidCharPointer(name,2); 1179 PetscValidPointer(ddm,3); 1180 *ddm = PETSC_NULL; 1181 if (dm->ops->createfielddecompositiondm) { 1182 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1183 } 1184 PetscFunctionReturn(0); 1185 } 1186 1187 #undef __FUNCT__ 1188 #define __FUNCT__ "DMCreateFieldDecomposition" 1189 /*@C 1190 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1191 corresponding to different fields: each IS contains the global indices of the dofs of the 1192 corresponding field. The optional list of DMs define the DM for each subproblem. 1193 Generalizes DMCreateFieldIS(). 1194 1195 Not collective 1196 1197 Input Parameter: 1198 . dm - the DM object 1199 1200 Output Parameters: 1201 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1202 . namelist - The name for each field (or PETSC_NULL if not requested) 1203 . islist - The global indices for each field (or PETSC_NULL if not requested) 1204 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1205 1206 Level: intermediate 1207 1208 Notes: 1209 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1210 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1211 and all of the arrays should be freed with PetscFree(). 
1212 1213 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1214 @*/ 1215 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1216 { 1217 PetscErrorCode ierr; 1218 1219 PetscFunctionBegin; 1220 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1221 if (len) {PetscValidPointer(len,2); *len = 0;} 1222 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1223 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1224 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1225 if (!dm->ops->createfielddecomposition) { 1226 PetscSection section; 1227 PetscInt numFields, f; 1228 1229 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1230 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1231 if (section && numFields && dm->ops->createsubdm) { 1232 *len = numFields; 1233 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1234 for (f = 0; f < numFields; ++f) { 1235 const char *fieldName; 1236 1237 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1238 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1239 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1240 } 1241 } else { 1242 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1243 /* By default there are no DMs associated with subproblems. */ 1244 if (dmlist) *dmlist = PETSC_NULL; 1245 } 1246 } 1247 else { 1248 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1249 } 1250 PetscFunctionReturn(0); 1251 } 1252 1253 #undef __FUNCT__ 1254 #define __FUNCT__ "DMCreateSubDM" 1255 /*@C 1256 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1257 The fields are defined by DMCreateFieldIS(). 
1258 1259 Not collective 1260 1261 Input Parameters: 1262 + dm - the DM object 1263 . numFields - number of fields in this subproblem 1264 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1265 1266 Output Parameters: 1267 . is - The global indices for the subproblem 1268 - dm - The DM for the subproblem 1269 1270 Level: intermediate 1271 1272 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1273 @*/ 1274 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1275 { 1276 PetscErrorCode ierr; 1277 1278 PetscFunctionBegin; 1279 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1280 PetscValidPointer(fields,3); 1281 if (is) {PetscValidPointer(is,4);} 1282 if (subdm) {PetscValidPointer(subdm,5);} 1283 if (dm->ops->createsubdm) { 1284 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1285 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1286 PetscFunctionReturn(0); 1287 } 1288 1289 #undef __FUNCT__ 1290 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1291 /*@C 1292 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1293 1294 Not Collective 1295 1296 Input Parameters: 1297 + dm - the DM object 1298 - name - the name of the subdomain decomposition 1299 1300 Output Parameter: 1301 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1302 1303 Level: advanced 1304 1305 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1306 @*/ 1307 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1308 { 1309 PetscErrorCode ierr; 1310 1311 PetscFunctionBegin; 1312 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1313 PetscValidCharPointer(name,2); 1314 PetscValidPointer(ddm,3); 1315 *ddm = PETSC_NULL; 1316 if (dm->ops->createdomaindecompositiondm) { 1317 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1318 } 1319 PetscFunctionReturn(0); 1320 } 1321 1322 #undef __FUNCT__ 1323 #define __FUNCT__ "DMCreateDomainDecomposition" 1324 /*@C 1325 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1326 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1327 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1328 define a nonoverlapping covering, while outer subdomains can overlap. 1329 The optional list of DMs define the DM for each subproblem. 1330 1331 Not collective 1332 1333 Input Parameter: 1334 . dm - the DM object 1335 1336 Output Parameters: 1337 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1338 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1339 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1340 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1341 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1342 1343 Level: intermediate 1344 1345 Notes: 1346 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1347 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1348 and all of the arrays should be freed with PetscFree(). 1349 1350 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1351 @*/ 1352 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1353 { 1354 PetscErrorCode ierr; 1355 1356 PetscFunctionBegin; 1357 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1358 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1359 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1360 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1361 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1362 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1363 if (dm->ops->createdomaindecomposition) { 1364 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1365 } 1366 PetscFunctionReturn(0); 1367 } 1368 1369 #undef __FUNCT__ 1370 #define __FUNCT__ "DMRefine" 1371 /*@ 1372 DMRefine - Refines a DM object 1373 1374 Collective on DM 1375 1376 Input Parameter: 1377 + dm - the DM object 1378 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1379 1380 Output Parameter: 1381 . 
dmf - the refined DM, or PETSC_NULL 1382 1383 Note: If no refinement was done, the return value is PETSC_NULL 1384 1385 Level: developer 1386 1387 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1388 @*/ 1389 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1390 { 1391 PetscErrorCode ierr; 1392 DMRefineHookLink link; 1393 1394 PetscFunctionBegin; 1395 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1396 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1397 if (*dmf) { 1398 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1399 (*dmf)->ops->initialguess = dm->ops->initialguess; 1400 (*dmf)->ops->function = dm->ops->function; 1401 (*dmf)->ops->functionj = dm->ops->functionj; 1402 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1403 (*dmf)->ops->jacobian = dm->ops->jacobian; 1404 } 1405 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1406 (*dmf)->ctx = dm->ctx; 1407 (*dmf)->leveldown = dm->leveldown; 1408 (*dmf)->levelup = dm->levelup + 1; 1409 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1410 for (link=dm->refinehook; link; link=link->next) { 1411 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1412 } 1413 } 1414 PetscFunctionReturn(0); 1415 } 1416 1417 #undef __FUNCT__ 1418 #define __FUNCT__ "DMRefineHookAdd" 1419 /*@ 1420 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1421 1422 Logically Collective 1423 1424 Input Arguments: 1425 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1426 . refinehook - function to run when setting up a coarser level 1427 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1428 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1429 1430 Calling sequence of refinehook: 1431 $ refinehook(DM coarse,DM fine,void *ctx); 1432 1433 + coarse - coarse level DM 1434 . fine - fine level DM to interpolate problem to 1435 - ctx - optional user-defined function context 1436 1437 Calling sequence for interphook: 1438 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1439 1440 + coarse - coarse level DM 1441 . interp - matrix interpolating a coarse-level solution to the finer grid 1442 . fine - fine level DM to update 1443 - ctx - optional user-defined function context 1444 1445 Level: advanced 1446 1447 Notes: 1448 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1449 1450 If this function is called multiple times, the hooks will be run in the order they are added. 1451 1452 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1453 @*/ 1454 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1455 { 1456 PetscErrorCode ierr; 1457 DMRefineHookLink link,*p; 1458 1459 PetscFunctionBegin; 1460 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1461 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1462 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1463 link->refinehook = refinehook; 1464 link->interphook = interphook; 1465 link->ctx = ctx; 1466 link->next = PETSC_NULL; 1467 *p = link; 1468 PetscFunctionReturn(0); 1469 } 1470 1471 #undef __FUNCT__ 1472 #define __FUNCT__ "DMInterpolate" 1473 /*@ 1474 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1475 1476 Collective if any 
hooks are 1477 1478 Input Arguments: 1479 + coarse - coarser DM to use as a base 1480 . restrct - interpolation matrix, apply using MatInterpolate() 1481 - fine - finer DM to update 1482 1483 Level: developer 1484 1485 .seealso: DMRefineHookAdd(), MatInterpolate() 1486 @*/ 1487 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1488 { 1489 PetscErrorCode ierr; 1490 DMRefineHookLink link; 1491 1492 PetscFunctionBegin; 1493 for (link=fine->refinehook; link; link=link->next) { 1494 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1495 } 1496 PetscFunctionReturn(0); 1497 } 1498 1499 #undef __FUNCT__ 1500 #define __FUNCT__ "DMGetRefineLevel" 1501 /*@ 1502 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1503 1504 Not Collective 1505 1506 Input Parameter: 1507 . dm - the DM object 1508 1509 Output Parameter: 1510 . level - number of refinements 1511 1512 Level: developer 1513 1514 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1515 1516 @*/ 1517 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1518 { 1519 PetscFunctionBegin; 1520 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1521 *level = dm->levelup; 1522 PetscFunctionReturn(0); 1523 } 1524 1525 #undef __FUNCT__ 1526 #define __FUNCT__ "DMGlobalToLocalBegin" 1527 /*@ 1528 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1529 1530 Neighbor-wise Collective on DM 1531 1532 Input Parameters: 1533 + dm - the DM object 1534 . g - the global vector 1535 . 
mode - INSERT_VALUES or ADD_VALUES 1536 - l - the local vector 1537 1538 1539 Level: beginner 1540 1541 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1542 1543 @*/ 1544 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1545 { 1546 PetscSF sf; 1547 PetscErrorCode ierr; 1548 1549 PetscFunctionBegin; 1550 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1551 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1552 if (sf) { 1553 PetscScalar *lArray, *gArray; 1554 1555 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1556 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1557 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1558 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1559 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1560 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1561 } else { 1562 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1563 } 1564 PetscFunctionReturn(0); 1565 } 1566 1567 #undef __FUNCT__ 1568 #define __FUNCT__ "DMGlobalToLocalEnd" 1569 /*@ 1570 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1571 1572 Neighbor-wise Collective on DM 1573 1574 Input Parameters: 1575 + dm - the DM object 1576 . g - the global vector 1577 . 
mode - INSERT_VALUES or ADD_VALUES 1578 - l - the local vector 1579 1580 1581 Level: beginner 1582 1583 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1584 1585 @*/ 1586 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1587 { 1588 PetscSF sf; 1589 PetscErrorCode ierr; 1590 PetscScalar *lArray, *gArray; 1591 1592 PetscFunctionBegin; 1593 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1594 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1595 if (sf) { 1596 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1597 1598 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1599 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1600 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1601 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1602 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1603 } else { 1604 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1605 } 1606 PetscFunctionReturn(0); 1607 } 1608 1609 #undef __FUNCT__ 1610 #define __FUNCT__ "DMLocalToGlobalBegin" 1611 /*@ 1612 DMLocalToGlobalBegin - updates global vectors from local vectors 1613 1614 Neighbor-wise Collective on DM 1615 1616 Input Parameters: 1617 + dm - the DM object 1618 . l - the local vector 1619 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1620 base point. 1621 - - the global vector 1622 1623 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1624 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1625 global array to the final global array with VecAXPY(). 1626 1627 Level: beginner 1628 1629 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1630 1631 @*/ 1632 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1633 { 1634 PetscSF sf; 1635 PetscErrorCode ierr; 1636 1637 PetscFunctionBegin; 1638 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1639 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1640 if (sf) { 1641 MPI_Op op; 1642 PetscScalar *lArray, *gArray; 1643 1644 switch(mode) { 1645 case INSERT_VALUES: 1646 case INSERT_ALL_VALUES: 1647 #if defined(PETSC_HAVE_MPI_REPLACE) 1648 op = MPI_REPLACE; break; 1649 #else 1650 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1651 #endif 1652 case ADD_VALUES: 1653 case ADD_ALL_VALUES: 1654 op = MPI_SUM; break; 1655 default: 1656 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1657 } 1658 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1659 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1660 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1661 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1662 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1663 } else { 1664 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1665 } 1666 PetscFunctionReturn(0); 1667 } 1668 1669 #undef __FUNCT__ 1670 #define __FUNCT__ "DMLocalToGlobalEnd" 1671 /*@ 1672 DMLocalToGlobalEnd - updates global vectors from local vectors 1673 1674 Neighbor-wise Collective on DM 1675 1676 Input Parameters: 1677 + dm - the DM object 1678 . l - the local vector 1679 . mode - INSERT_VALUES or ADD_VALUES 1680 - g - the global vector 1681 1682 1683 Level: beginner 1684 1685 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1686 1687 @*/ 1688 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1689 { 1690 PetscSF sf; 1691 PetscErrorCode ierr; 1692 1693 PetscFunctionBegin; 1694 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1695 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1696 if (sf) { 1697 MPI_Op op; 1698 PetscScalar *lArray, *gArray; 1699 1700 switch(mode) { 1701 case INSERT_VALUES: 1702 case INSERT_ALL_VALUES: 1703 #if defined(PETSC_HAVE_MPI_REPLACE) 1704 op = MPI_REPLACE; break; 1705 #else 1706 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1707 #endif 1708 case ADD_VALUES: 1709 case ADD_ALL_VALUES: 1710 op = MPI_SUM; break; 1711 default: 1712 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1713 } 1714 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1715 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1716 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1717 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1718 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1719 } else { 1720 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1721 } 1722 PetscFunctionReturn(0); 1723 } 1724 1725 #undef __FUNCT__ 1726 #define __FUNCT__ "DMComputeJacobianDefault" 1727 /*@ 1728 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1729 1730 Collective on DM 1731 1732 Input Parameter: 1733 + dm - the DM object 1734 . x - location to compute Jacobian at; may be ignored for linear problems 1735 . A - matrix that defines the operator for the linear solve 1736 - B - the matrix used to construct the preconditioner 1737 1738 Level: developer 1739 1740 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1741 DMSetFunction() 1742 1743 @*/ 1744 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1745 { 1746 PetscErrorCode ierr; 1747 1748 PetscFunctionBegin; 1749 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1750 *stflag = SAME_NONZERO_PATTERN; 1751 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1752 if (A != B) { 1753 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1754 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1755 } 1756 PetscFunctionReturn(0); 1757 } 1758 1759 #undef __FUNCT__ 1760 #define __FUNCT__ "DMCoarsen" 1761 /*@ 1762 DMCoarsen - Coarsens a DM object 1763 1764 Collective on DM 1765 1766 Input Parameter: 1767 + dm - the DM object 1768 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1769 1770 Output Parameter: 1771 . 
dmc - the coarsened DM 1772 1773 Level: developer 1774 1775 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1776 1777 @*/ 1778 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1779 { 1780 PetscErrorCode ierr; 1781 DMCoarsenHookLink link; 1782 1783 PetscFunctionBegin; 1784 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1785 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1786 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1787 (*dmc)->ops->initialguess = dm->ops->initialguess; 1788 (*dmc)->ops->function = dm->ops->function; 1789 (*dmc)->ops->functionj = dm->ops->functionj; 1790 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1791 (*dmc)->ops->jacobian = dm->ops->jacobian; 1792 } 1793 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1794 (*dmc)->ctx = dm->ctx; 1795 (*dmc)->levelup = dm->levelup; 1796 (*dmc)->leveldown = dm->leveldown + 1; 1797 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1798 for (link=dm->coarsenhook; link; link=link->next) { 1799 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1800 } 1801 PetscFunctionReturn(0); 1802 } 1803 1804 #undef __FUNCT__ 1805 #define __FUNCT__ "DMCoarsenHookAdd" 1806 /*@ 1807 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1808 1809 Logically Collective 1810 1811 Input Arguments: 1812 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1813 . coarsenhook - function to run when setting up a coarser level 1814 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1815 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1816 1817 Calling sequence of coarsenhook: 1818 $ coarsenhook(DM fine,DM coarse,void *ctx); 1819 1820 + fine - fine level DM 1821 . 
coarse - coarse level DM to restrict problem to 1822 - ctx - optional user-defined function context 1823 1824 Calling sequence for restricthook: 1825 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1826 1827 + fine - fine level DM 1828 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1829 . rscale - scaling vector for restriction 1830 . inject - matrix restricting by injection 1831 . coarse - coarse level DM to update 1832 - ctx - optional user-defined function context 1833 1834 Level: advanced 1835 1836 Notes: 1837 This function is only needed if auxiliary data needs to be set up on coarse grids. 1838 1839 If this function is called multiple times, the hooks will be run in the order they are added. 1840 1841 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1842 extract the finest level information from its context (instead of from the SNES). 1843 1844 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1845 @*/ 1846 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1847 { 1848 PetscErrorCode ierr; 1849 DMCoarsenHookLink link,*p; 1850 1851 PetscFunctionBegin; 1852 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1853 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1854 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1855 link->coarsenhook = coarsenhook; 1856 link->restricthook = restricthook; 1857 link->ctx = ctx; 1858 link->next = PETSC_NULL; 1859 *p = link; 1860 PetscFunctionReturn(0); 1861 } 1862 1863 #undef __FUNCT__ 1864 #define __FUNCT__ "DMRestrict" 1865 /*@ 1866 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1867 1868 Collective 
if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* run every registered restrict hook, oldest first; hooks without a restricthook are skipped */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrictHookAdd"
/*@
   DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid

   Logically Collective

   Input Arguments:
+  global - global DM
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)

   Calling sequence for restricthook (argument order as invoked by DMBlockRestrict()):
$  restricthook(DM global,VecScatter in,VecScatter out,DM block,void *ctx)

+  global - global DM
.  in - scatter to block vector values only owned locally
.  out - scatter to the outer (with ghost and overlap points) block vector
.  block - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.
   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMBlockRestrictHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ctx          = ctx;
  link->next         = PETSC_NULL;
  *p = link; /* append at the tail so hooks run in registration order */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrict"
/*@
   DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  in - scatter to block vector values only owned locally
.  out - scatter to the outer (with ghost and overlap points) block vector
-  block - block DM to update

   Level: developer

.seealso: DMBlockRestrictHookAdd(), MatRestrict()
@*/
PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
{
  PetscErrorCode          ierr;
  DMBlockRestrictHookLink link;

  PetscFunctionBegin;
  /* run every registered block-restrict hook, oldest first */
  for (link=global->blockrestricthook; link; link=link->next) {
    if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoarsenLevel"
/*@
   DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  level - number of coarsenings

   Level: developer

.seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->leveldown;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "DMRefineHierarchy"
/*@C
   DMRefineHierarchy - Refines a DM object, all levels at once

   Collective on DM

   Input Parameter:
+  dm - the DM object
-  nlevels - the number of levels of refinement

   Output Parameter:
.
dmf - the refined DM hierarchy 2011 2012 Level: developer 2013 2014 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2015 2016 @*/ 2017 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2018 { 2019 PetscErrorCode ierr; 2020 2021 PetscFunctionBegin; 2022 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2023 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2024 if (nlevels == 0) PetscFunctionReturn(0); 2025 if (dm->ops->refinehierarchy) { 2026 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2027 } else if (dm->ops->refine) { 2028 PetscInt i; 2029 2030 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 2031 for (i=1; i<nlevels; i++) { 2032 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 2033 } 2034 } else { 2035 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2036 } 2037 PetscFunctionReturn(0); 2038 } 2039 2040 #undef __FUNCT__ 2041 #define __FUNCT__ "DMCoarsenHierarchy" 2042 /*@C 2043 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2044 2045 Collective on DM 2046 2047 Input Parameter: 2048 + dm - the DM object 2049 - nlevels - the number of levels of coarsening 2050 2051 Output Parameter: 2052 . 
dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* the implementation provides a specialized all-at-once coarsening */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* fall back to repeated single-level coarsening */
    PetscInt i;

    ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
    }
  } else {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix) 2097 2098 Level: intermediate 2099 2100 .keywords: interpolation, restriction, multigrid 2101 2102 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2103 @*/ 2104 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2105 { 2106 PetscErrorCode ierr; 2107 2108 PetscFunctionBegin; 2109 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2110 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2111 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2112 PetscFunctionReturn(0); 2113 } 2114 2115 #undef __FUNCT__ 2116 #define __FUNCT__ "DMSetApplicationContextDestroy" 2117 /*@C 2118 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2119 2120 Not Collective 2121 2122 Input Parameters: 2123 + dm - the DM object 2124 - destroy - the destroy function 2125 2126 Level: intermediate 2127 2128 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2129 2130 @*/ 2131 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2132 { 2133 PetscFunctionBegin; 2134 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2135 dm->ctxdestroy = destroy; 2136 PetscFunctionReturn(0); 2137 } 2138 2139 #undef __FUNCT__ 2140 #define __FUNCT__ "DMSetApplicationContext" 2141 /*@ 2142 DMSetApplicationContext - Set a user context into a DM object 2143 2144 Not Collective 2145 2146 Input Parameters: 2147 + dm - the DM object 2148 - ctx - the user context 2149 2150 Level: intermediate 2151 2152 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2153 2154 @*/ 2155 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2156 { 2157 PetscFunctionBegin; 2158 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2159 dm->ctx = ctx; 2160 PetscFunctionReturn(0); 2161 } 2162 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
   DMGetApplicationContext - Gets a user context from a DM object

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  ctx - the user context

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really the address of a pointer; the stored context is written through it */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
   DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

   Logically Collective on DM

   Input Parameter:
+  dm - the DM object
-  f - the function to compute the initial guess

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
   DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

   Logically Collective on DM

   Input Parameter:
+  dm - the DM object
-  f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

   Level: intermediate

   Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite
differences if no Jacobian
   computation routine is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  if (f) {
    /* also install f as the function used for finite-difference Jacobians;
       cancelling (f == PETSC_NULL) deliberately leaves functionj in place */
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
   DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

   Logically Collective on DM

   Input Parameter:
+  dm - the DM object
-  f - the function to compute the matrix entries

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVariableBounds"
/*@C
   DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2273 2274 Logically Collective on DM 2275 2276 Input Parameter: 2277 + dm - the DM object 2278 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2279 2280 Level: intermediate 2281 2282 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2283 DMSetJacobian() 2284 2285 @*/ 2286 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2287 { 2288 PetscFunctionBegin; 2289 dm->ops->computevariablebounds = f; 2290 PetscFunctionReturn(0); 2291 } 2292 2293 #undef __FUNCT__ 2294 #define __FUNCT__ "DMHasVariableBounds" 2295 /*@ 2296 DMHasVariableBounds - does the DM object have a variable bounds function? 2297 2298 Not Collective 2299 2300 Input Parameter: 2301 . dm - the DM object to destroy 2302 2303 Output Parameter: 2304 . flg - PETSC_TRUE if the variable bounds function exists 2305 2306 Level: developer 2307 2308 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2309 2310 @*/ 2311 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2312 { 2313 PetscFunctionBegin; 2314 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2315 PetscFunctionReturn(0); 2316 } 2317 2318 #undef __FUNCT__ 2319 #define __FUNCT__ "DMComputeVariableBounds" 2320 /*@C 2321 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2322 2323 Logically Collective on DM 2324 2325 Input Parameters: 2326 + dm - the DM object to destroy 2327 - x - current solution at which the bounds are computed 2328 2329 Output parameters: 2330 + xl - lower bound 2331 - xu - upper bound 2332 2333 Level: intermediate 2334 2335 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2336 DMSetFunction(), DMSetVariableBounds() 2337 2338 @*/ 2339 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2340 { 2341 PetscErrorCode ierr; 2342 PetscFunctionBegin; 2343 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2344 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2345 if (dm->ops->computevariablebounds) { 2346 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2347 } 2348 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2349 PetscFunctionReturn(0); 2350 } 2351 2352 #undef __FUNCT__ 2353 #define __FUNCT__ "DMComputeInitialGuess" 2354 /*@ 2355 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2356 2357 Collective on DM 2358 2359 Input Parameter: 2360 + dm - the DM object to destroy 2361 - x - the vector to hold the initial guess values 2362 2363 Level: developer 2364 2365 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2366 2367 @*/ 2368 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2369 { 2370 PetscErrorCode ierr; 2371 PetscFunctionBegin; 2372 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2373 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2374 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2375 PetscFunctionReturn(0); 2376 } 2377 2378 #undef __FUNCT__ 2379 #define __FUNCT__ "DMHasInitialGuess" 2380 /*@ 2381 
DMHasInitialGuess - does the DM object have an initial guess function

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  flg - PETSC_TRUE if function exists

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
   DMHasFunction - does the DM object have a function

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  flg - PETSC_TRUE if function exists

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasFunction(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
   DMHasJacobian - does the DM object have a matrix function

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.
flg - PETSC_TRUE if function exists 2442 2443 Level: developer 2444 2445 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2446 2447 @*/ 2448 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2449 { 2450 PetscFunctionBegin; 2451 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2452 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2453 PetscFunctionReturn(0); 2454 } 2455 2456 #undef __FUNCT__ 2457 #define __FUNCT__ "DMHasColoring" 2458 /*@ 2459 DMHasColoring - does the DM object have a method of providing a coloring? 2460 2461 Not Collective 2462 2463 Input Parameter: 2464 . dm - the DM object 2465 2466 Output Parameter: 2467 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2468 2469 Level: developer 2470 2471 .seealso DMHasFunction(), DMCreateColoring() 2472 2473 @*/ 2474 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2475 { 2476 PetscFunctionBegin; 2477 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2478 PetscFunctionReturn(0); 2479 } 2480 2481 #undef __FUNCT__ 2482 #define __FUNCT__ "DMSetVec" 2483 /*@C 2484 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2485 2486 Collective on DM 2487 2488 Input Parameter: 2489 + dm - the DM object 2490 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction(), DMSetJacobian(), DMSetVariableBounds()

@*/
PetscErrorCode DMSetVec(DM dm,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x) {
    /* keep a private copy of x; create the storage lazily on first use */
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  else if (dm->x) {
    /* passing PETSC_NULL clears any previously stored vector */
    ierr = VecDestroy(&dm->x); CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction"
/*@
   DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES

   Collective on DM

   Input Parameter:
+  dm - the DM object
.  x - the location where the function is evaluationed, may be ignored for linear problems
-  b - the vector to hold the right hand side entries

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()");
  /* mark the PETSc stack so errors inside the callback are attributed to user code */
  PetscStackPush("DM user function");
  ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}



#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian"
/*@
   DMComputeJacobian - compute the matrix entries for the solver

   Collective on DM

   Input Parameter:
+  dm - the DM object
.  x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.  A - matrix that defines the operator for the linear solve
-  B - the matrix used to construct the preconditioner

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* no user Jacobian provided: set up a coloring-based finite-difference Jacobian
       on first use, driven by the function stored in dm->ops->functionj */
    ISColoring    coloring;
    MatFDColoring fd;
    MatType       mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    /* cache the FD coloring and route all subsequent calls through the default path */
    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x; /* fall back to the vector stored by DMSetVec()/a previous call */
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* registry of available DM implementations; populated by DMRegister()/DMRegisterAll() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
   DMSetType - Builds a DM, for a particular DM implementation.

   Collective on DM

   Input Parameters:
+  dm - The DM object
-  method - The name of the DM type

   Options Database Key:
.  -dm_type <type> - Sets the DM type; use -help for a list of available types

   Notes:
   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

   Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already of the requested type */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down any implementation-specific data of the previous type before rebuilding */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
   DMGetType - Gets the DM type name (as a string) from the DM.

   Not Collective

   Input Parameter:
.  dm - The DM

   Output Parameter:
.  type - The DM type name

   Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
   DMConvert - Converts a DM to another DM, either of the same or different type.

   Collective on DM

   Input Parameters:
+  dm - the DM
-  newtype - new DM type (use "same" for the same type)

   Output Parameter:
.  M - pointer to new DM

   Notes:
   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
   the MPI communicator of the generated DM is always the same as the communicator
   of the input DM.

   Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  /* NOTE(review): sametype and issame are computed but never consulted below -- confirm this is intended */
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr); /* B was only needed to look up the converter */
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): B is not destroyed on this error path -- looks like a leak; confirm */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
   DMRegister - See DMRegisterDynamic()

   Level: advanced
@*/
PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char           fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* registered under "path:name" so the symbol can be located in dynamic libraries later */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  DMRegisterAllCalled = PETSC_FALSE; /* allow DMRegisterAll() to repopulate the list later */
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* context shared by the MATLAB callback wrappers: names of the MATLAB functions to invoke */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
   DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 1,nrhs = 4;
  mxArray         *plhs[1],*prhs[4];
  long long int   lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* pass the object pointers to MATLAB as integers encoded in doubles */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* the single output of the MATLAB wrapper is treated as an error code */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
   DMSetJacobianMatlab().
   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 2,nrhs = 5;
  mxArray         *plhs[2],*prhs[5];
  long long int   lx = 0,lA = 0,lB = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B; the object
     pointers are passed to MATLAB as 64-bit integers stored in doubles */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)lA);
  prhs[3] = mxCreateDoubleScalar((double)lB);
  prhs[4] = mxCreateString(sctx->jacname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* first output is the MatStructure flag, second is the MATLAB function's error code */
  *str = (MatStructure) mxGetScalar(plhs[0]);
  ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetJacobianMatlab"
/*
   DMSetJacobianMatlab - Sets the Jacobian function evaluation routine

*/
PetscErrorCode  DMSetJacobianMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    /* NOTE(review): freshly malloc'd context is not zeroed here, so funcname is
       uninitialized unless DMSetFunctionMatlab() was called first -- confirm */
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary or HDF5 format
  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   Defaults to the DM DA.

   Notes for advanced users:
   Most users should not need to know the details of the binary storage
   format, since DMLoad() and DMView() completely hide these details.
   But for anyone who's interested, the standard binary matrix storage
   format is
.vb
     has not yet been determined
.ve

   In addition, PETSc automatically does the byte swapping for
machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
linux, Windows and the paragon; thus if you write your own binary
read/write routines you have to swap the bytes; see PetscBinaryRead()
and PetscBinaryWrite() to see how this may be done.
3009 3010 Concepts: vector^loading from file 3011 3012 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 3013 @*/ 3014 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 3015 { 3016 PetscErrorCode ierr; 3017 3018 PetscFunctionBegin; 3019 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 3020 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 3021 3022 if (!((PetscObject)newdm)->type_name) { 3023 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 3024 } 3025 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 3026 PetscFunctionReturn(0); 3027 } 3028 3029 /******************************** FEM Support **********************************/ 3030 3031 #undef __FUNCT__ 3032 #define __FUNCT__ "DMPrintCellVector" 3033 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 3034 PetscInt f; 3035 PetscErrorCode ierr; 3036 3037 PetscFunctionBegin; 3038 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3039 for (f = 0; f < len; ++f) { 3040 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 3041 } 3042 PetscFunctionReturn(0); 3043 } 3044 3045 #undef __FUNCT__ 3046 #define __FUNCT__ "DMPrintCellMatrix" 3047 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 3048 PetscInt f, g; 3049 PetscErrorCode ierr; 3050 3051 PetscFunctionBegin; 3052 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3053 for (f = 0; f < rows; ++f) { 3054 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 3055 for (g = 0; g < cols; ++g) { 3056 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 3057 } 3058 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 3059 } 3060 PetscFunctionReturn(0); 3061 } 3062 3063 #undef __FUNCT__ 3064 #define __FUNCT__ "DMGetLocalFunction" 3065 /*@C 3066 DMGetLocalFunction - Get the local residual 
function from this DM 3067 3068 Not collective 3069 3070 Input Parameter: 3071 . dm - The DM 3072 3073 Output Parameter: 3074 . lf - The local residual function 3075 3076 Calling sequence of lf: 3077 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3078 3079 + snes - the SNES context 3080 . x - local vector with the state at which to evaluate residual 3081 . f - local vector to put residual in 3082 - ctx - optional user-defined function context 3083 3084 Level: intermediate 3085 3086 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3087 @*/ 3088 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 3089 { 3090 PetscFunctionBegin; 3091 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3092 if (lf) *lf = dm->lf; 3093 PetscFunctionReturn(0); 3094 } 3095 3096 #undef __FUNCT__ 3097 #define __FUNCT__ "DMSetLocalFunction" 3098 /*@C 3099 DMSetLocalFunction - Set the local residual function from this DM 3100 3101 Not collective 3102 3103 Input Parameters: 3104 + dm - The DM 3105 - lf - The local residual function 3106 3107 Calling sequence of lf: 3108 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3109 3110 + snes - the SNES context 3111 . x - local vector with the state at which to evaluate residual 3112 . f - local vector to put residual in 3113 - ctx - optional user-defined function context 3114 3115 Level: intermediate 3116 3117 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3118 @*/ 3119 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 3120 { 3121 PetscFunctionBegin; 3122 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3123 dm->lf = lf; 3124 PetscFunctionReturn(0); 3125 } 3126 3127 #undef __FUNCT__ 3128 #define __FUNCT__ "DMGetLocalJacobian" 3129 /*@C 3130 DMGetLocalJacobian - Get the local Jacobian function from this DM 3131 3132 Not collective 3133 3134 Input Parameter: 3135 . dm - The DM 3136 3137 Output Parameter: 3138 . 
lj - The local Jacobian function 3139 3140 Calling sequence of lj: 3141 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3142 3143 + snes - the SNES context 3144 . x - local vector with the state at which to evaluate residual 3145 . J - matrix to put Jacobian in 3146 . M - matrix to use for defining Jacobian preconditioner 3147 - ctx - optional user-defined function context 3148 3149 Level: intermediate 3150 3151 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3152 @*/ 3153 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 3154 { 3155 PetscFunctionBegin; 3156 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3157 if (lj) *lj = dm->lj; 3158 PetscFunctionReturn(0); 3159 } 3160 3161 #undef __FUNCT__ 3162 #define __FUNCT__ "DMSetLocalJacobian" 3163 /*@C 3164 DMSetLocalJacobian - Set the local Jacobian function from this DM 3165 3166 Not collective 3167 3168 Input Parameters: 3169 + dm - The DM 3170 - lj - The local Jacobian function 3171 3172 Calling sequence of lj: 3173 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3174 3175 + snes - the SNES context 3176 . x - local vector with the state at which to evaluate residual 3177 . J - matrix to put Jacobian in 3178 . M - matrix to use for defining Jacobian preconditioner 3179 - ctx - optional user-defined function context 3180 3181 Level: intermediate 3182 3183 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3184 @*/ 3185 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 3186 { 3187 PetscFunctionBegin; 3188 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3189 dm->lj = lj; 3190 PetscFunctionReturn(0); 3191 } 3192 3193 #undef __FUNCT__ 3194 #define __FUNCT__ "DMGetDefaultSection" 3195 /*@ 3196 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 3197 3198 Input Parameter: 3199 . dm - The DM 3200 3201 Output Parameter: 3202 . 
section - The PetscSection 3203 3204 Level: intermediate 3205 3206 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3207 3208 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3209 @*/ 3210 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 3211 PetscFunctionBegin; 3212 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3213 PetscValidPointer(section, 2); 3214 *section = dm->defaultSection; 3215 PetscFunctionReturn(0); 3216 } 3217 3218 #undef __FUNCT__ 3219 #define __FUNCT__ "DMSetDefaultSection" 3220 /*@ 3221 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 3222 3223 Input Parameters: 3224 + dm - The DM 3225 - section - The PetscSection 3226 3227 Level: intermediate 3228 3229 Note: Any existing Section will be destroyed 3230 3231 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3232 @*/ 3233 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3234 PetscInt numFields; 3235 PetscInt f; 3236 PetscErrorCode ierr; 3237 3238 PetscFunctionBegin; 3239 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3240 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3241 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3242 dm->defaultSection = section; 3243 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3244 if (numFields) { 3245 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3246 for (f = 0; f < numFields; ++f) { 3247 const char *name; 3248 3249 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3250 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3251 } 3252 } 3253 PetscFunctionReturn(0); 3254 } 3255 3256 #undef __FUNCT__ 3257 #define __FUNCT__ "DMGetDefaultGlobalSection" 3258 /*@ 3259 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3260 3261 Input Parameter: 3262 . dm - The DM 3263 3264 Output Parameter: 3265 . 
section - The PetscSection 3266 3267 Level: intermediate 3268 3269 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3270 3271 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3272 @*/ 3273 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3274 PetscErrorCode ierr; 3275 3276 PetscFunctionBegin; 3277 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3278 PetscValidPointer(section, 2); 3279 if (!dm->defaultGlobalSection) { 3280 if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 3281 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 3282 } 3283 *section = dm->defaultGlobalSection; 3284 PetscFunctionReturn(0); 3285 } 3286 3287 #undef __FUNCT__ 3288 #define __FUNCT__ "DMSetDefaultGlobalSection" 3289 /*@ 3290 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 3291 3292 Input Parameters: 3293 + dm - The DM 3294 - section - The PetscSection 3295 3296 Level: intermediate 3297 3298 Note: Any existing Section will be destroyed 3299 3300 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 3301 @*/ 3302 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) { 3303 PetscErrorCode ierr; 3304 3305 PetscFunctionBegin; 3306 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3307 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3308 dm->defaultGlobalSection = section; 3309 PetscFunctionReturn(0); 3310 } 3311 3312 #undef __FUNCT__ 3313 #define __FUNCT__ "DMGetDefaultSF" 3314 /*@ 3315 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3316 it is created from the default PetscSection layouts in the DM. 3317 3318 Input Parameter: 3319 . dm - The DM 3320 3321 Output Parameter: 3322 . 
sf - The PetscSF 3323 3324 Level: intermediate 3325 3326 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3327 3328 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3329 @*/ 3330 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3331 PetscInt nroots; 3332 PetscErrorCode ierr; 3333 3334 PetscFunctionBegin; 3335 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3336 PetscValidPointer(sf, 2); 3337 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3338 if (nroots < 0) { 3339 PetscSection section, gSection; 3340 3341 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3342 if (section) { 3343 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3344 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3345 } else { 3346 *sf = PETSC_NULL; 3347 PetscFunctionReturn(0); 3348 } 3349 } 3350 *sf = dm->defaultSF; 3351 PetscFunctionReturn(0); 3352 } 3353 3354 #undef __FUNCT__ 3355 #define __FUNCT__ "DMSetDefaultSF" 3356 /*@ 3357 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3358 3359 Input Parameters: 3360 + dm - The DM 3361 - sf - The PetscSF 3362 3363 Level: intermediate 3364 3365 Note: Any previous SF is destroyed 3366 3367 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3368 @*/ 3369 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3370 PetscErrorCode ierr; 3371 3372 PetscFunctionBegin; 3373 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3374 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3375 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3376 dm->defaultSF = sf; 3377 PetscFunctionReturn(0); 3378 } 3379 3380 #undef __FUNCT__ 3381 #define __FUNCT__ "DMCreateDefaultSF" 3382 /*@C 3383 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3384 describing the data layout. 3385 3386 Input Parameters: 3387 + dm - The DM 3388 . 
localSection - PetscSection describing the local data layout 3389 - globalSection - PetscSection describing the global data layout 3390 3391 Level: intermediate 3392 3393 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3394 @*/ 3395 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3396 { 3397 MPI_Comm comm = ((PetscObject) dm)->comm; 3398 PetscLayout layout; 3399 const PetscInt *ranges; 3400 PetscInt *local; 3401 PetscSFNode *remote; 3402 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3403 PetscMPIInt size, rank; 3404 PetscErrorCode ierr; 3405 3406 PetscFunctionBegin; 3407 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3408 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3409 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3410 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3411 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3412 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3413 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3414 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3415 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3416 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3417 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3418 PetscInt gdof, gcdof; 3419 3420 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3421 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3422 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3423 } 3424 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3425 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3426 for (p = pStart, l = 0; p < pEnd; ++p) { 3427 const PetscInt *cind; 3428 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3429 3430 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3431 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3432 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3433 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3434 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3435 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3436 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3437 if (!gdof) continue; /* Censored point */ 3438 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3439 if (gsize != dof-cdof) { 3440 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", size, p, dof-cdof, dof); 3441 cdof = 0; /* Ignore constraints */ 3442 } 3443 for (d = 0, c = 0; d < dof; ++d) { 3444 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3445 local[l+d-c] = off+d; 3446 } 3447 if (gdof < 0) { 3448 for(d = 0; d < gsize; ++d, ++l) { 3449 PetscInt offset = -(goff+1) + d, r; 3450 3451 for (r = 0; r < size; ++r) { 3452 if ((offset >= ranges[r]) && (offset < ranges[r+1])) break; 3453 } 3454 remote[l].rank = r; 3455 remote[l].index = offset - ranges[r]; 3456 } 3457 } else { 3458 for(d = 0; d < gsize; ++d, ++l) { 3459 remote[l].rank = rank; 3460 remote[l].index = goff+d - ranges[rank]; 3461 } 3462 } 3463 } 3464 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3465 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3466 ierr = PetscSFSetGraph(dm->defaultSF, nroots, 
nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3467 PetscFunctionReturn(0); 3468 } 3469 3470 #undef __FUNCT__ 3471 #define __FUNCT__ "DMGetPointSF" 3472 /*@ 3473 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3474 3475 Input Parameter: 3476 . dm - The DM 3477 3478 Output Parameter: 3479 . sf - The PetscSF 3480 3481 Level: intermediate 3482 3483 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3484 3485 .seealso: DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3486 @*/ 3487 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3488 PetscFunctionBegin; 3489 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3490 PetscValidPointer(sf, 2); 3491 *sf = dm->sf; 3492 PetscFunctionReturn(0); 3493 } 3494 3495 #undef __FUNCT__ 3496 #define __FUNCT__ "DMGetNumFields" 3497 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3498 { 3499 PetscFunctionBegin; 3500 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3501 PetscValidPointer(numFields, 2); 3502 *numFields = dm->numFields; 3503 PetscFunctionReturn(0); 3504 } 3505 3506 #undef __FUNCT__ 3507 #define __FUNCT__ "DMSetNumFields" 3508 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3509 { 3510 PetscInt f; 3511 PetscErrorCode ierr; 3512 3513 PetscFunctionBegin; 3514 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3515 for (f = 0; f < dm->numFields; ++f) { 3516 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3517 } 3518 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3519 dm->numFields = numFields; 3520 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3521 for (f = 0; f < dm->numFields; ++f) { 3522 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3523 } 3524 PetscFunctionReturn(0); 3525 } 3526 3527 #undef __FUNCT__ 3528 #define __FUNCT__ "DMGetField" 3529 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3530 { 
3531 PetscFunctionBegin; 3532 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3533 PetscValidPointer(field, 2); 3534 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3535 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3536 *field = dm->fields[f]; 3537 PetscFunctionReturn(0); 3538 } 3539 3540 #undef __FUNCT__ 3541 #define __FUNCT__ "DMSetCoordinates" 3542 /*@ 3543 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3544 3545 Collective on DM 3546 3547 Input Parameters: 3548 + dm - the DM 3549 - c - coordinate vector 3550 3551 Note: 3552 The coordinates do include those for ghost points, which are in the local vector 3553 3554 Level: intermediate 3555 3556 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3557 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3558 @*/ 3559 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3560 { 3561 PetscErrorCode ierr; 3562 3563 PetscFunctionBegin; 3564 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3565 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3566 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3567 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3568 dm->coordinates = c; 3569 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3570 PetscFunctionReturn(0); 3571 } 3572 3573 #undef __FUNCT__ 3574 #define __FUNCT__ "DMSetCoordinatesLocal" 3575 /*@ 3576 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3577 3578 Collective on DM 3579 3580 Input Parameters: 3581 + dm - the DM 3582 - c - coordinate vector 3583 3584 Note: 3585 The coordinates of ghost points can be set using DMSetCoordinates() 3586 followed by DMGetCoordinatesLocal(). 
  This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* the cached global coordinates are stale now; drop them so they are rebuilt on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  NOTE(review): documented "Not Collective", but when only local coordinates exist the
  first call performs DMCreateGlobalVector() and DMLocalToGlobal(), which are collective
  -- confirm whether this should be marked Collective on DM.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* lazily assemble the global vector from the local one if only the local exists */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* lazily scatter the global vector to a local (ghosted) one if only the global exists */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* created lazily via the type-specific hook; cached on the DM afterwards */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}