1 #include <petscsnes.h> 2 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 3 4 PetscClassId DM_CLASSID; 5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 6 7 #undef __FUNCT__ 8 #define __FUNCT__ "DMCreate" 9 /*@ 10 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 11 12 If you never call DMSetType() it will generate an 13 error when you try to use the vector. 14 15 Collective on MPI_Comm 16 17 Input Parameter: 18 . comm - The communicator for the DM object 19 20 Output Parameter: 21 . dm - The DM object 22 23 Level: beginner 24 25 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 26 @*/ 27 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 28 { 29 DM v; 30 PetscErrorCode ierr; 31 32 PetscFunctionBegin; 33 PetscValidPointer(dm,2); 34 *dm = PETSC_NULL; 35 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 36 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 39 #endif 40 41 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 42 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 43 44 45 v->ltogmap = PETSC_NULL; 46 v->ltogmapb = PETSC_NULL; 47 v->bs = 1; 48 v->coloringtype = IS_COLORING_GLOBAL; 49 v->lf = PETSC_NULL; 50 v->lj = PETSC_NULL; 51 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 52 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 53 v->defaultSection = PETSC_NULL; 54 v->defaultGlobalSection = PETSC_NULL; 55 { 56 PetscInt i; 57 for (i = 0; i < 10; ++i) { 58 v->nullspaceConstructors[i] = PETSC_NULL; 59 } 60 } 61 v->numFields = 0; 62 v->fields = PETSC_NULL; 63 64 *dm = v; 65 PetscFunctionReturn(0); 66 } 67 68 #undef __FUNCT__ 69 #define __FUNCT__ "DMSetVecType" 70 /*@C 71 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 72 73 Logically Collective 
on DMDA 74 75 Input Parameter: 76 + da - initial distributed array 77 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 78 79 Options Database: 80 . -dm_vec_type ctype 81 82 Level: intermediate 83 84 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 85 @*/ 86 PetscErrorCode DMSetVecType(DM da,VecType ctype) 87 { 88 PetscErrorCode ierr; 89 90 PetscFunctionBegin; 91 PetscValidHeaderSpecific(da,DM_CLASSID,1); 92 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 93 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 94 PetscFunctionReturn(0); 95 } 96 97 #undef __FUNCT__ 98 #define __FUNCT__ "VecGetDM" 99 /*@ 100 VecGetDM - Gets the DM defining the data layout of the vector 101 102 Not collective 103 104 Input Parameter: 105 . v - The Vec 106 107 Output Parameter: 108 . dm - The DM 109 110 Level: intermediate 111 112 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 113 @*/ 114 PetscErrorCode VecGetDM(Vec v, DM *dm) 115 { 116 PetscErrorCode ierr; 117 118 PetscFunctionBegin; 119 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 120 PetscValidPointer(dm,2); 121 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 122 PetscFunctionReturn(0); 123 } 124 125 #undef __FUNCT__ 126 #define __FUNCT__ "VecSetDM" 127 /*@ 128 VecSetDM - Sets the DM defining the data layout of the vector 129 130 Not collective 131 132 Input Parameters: 133 + v - The Vec 134 - dm - The DM 135 136 Level: intermediate 137 138 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 139 @*/ 140 PetscErrorCode VecSetDM(Vec v, DM dm) 141 { 142 PetscErrorCode ierr; 143 144 PetscFunctionBegin; 145 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 146 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 147 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 148 PetscFunctionReturn(0); 149 } 150 151 #undef __FUNCT__ 152 #define 
__FUNCT__ "DMSetMatType" 153 /*@C 154 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 155 156 Logically Collective on DM 157 158 Input Parameter: 159 + dm - the DM context 160 . ctype - the matrix type 161 162 Options Database: 163 . -dm_mat_type ctype 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 168 @*/ 169 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 170 { 171 PetscErrorCode ierr; 172 PetscFunctionBegin; 173 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 174 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 175 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 176 PetscFunctionReturn(0); 177 } 178 179 #undef __FUNCT__ 180 #define __FUNCT__ "MatGetDM" 181 /*@ 182 MatGetDM - Gets the DM defining the data layout of the matrix 183 184 Not collective 185 186 Input Parameter: 187 . A - The Mat 188 189 Output Parameter: 190 . dm - The DM 191 192 Level: intermediate 193 194 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 195 @*/ 196 PetscErrorCode MatGetDM(Mat A, DM *dm) 197 { 198 PetscErrorCode ierr; 199 200 PetscFunctionBegin; 201 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 202 PetscValidPointer(dm,2); 203 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 204 PetscFunctionReturn(0); 205 } 206 207 #undef __FUNCT__ 208 #define __FUNCT__ "MatSetDM" 209 /*@ 210 MatSetDM - Sets the DM defining the data layout of the matrix 211 212 Not collective 213 214 Input Parameters: 215 + A - The Mat 216 - dm - The DM 217 218 Level: intermediate 219 220 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 221 @*/ 222 PetscErrorCode MatSetDM(Mat A, DM dm) 223 { 224 PetscErrorCode ierr; 225 226 PetscFunctionBegin; 227 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 228 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 229 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 230 
PetscFunctionReturn(0); 231 } 232 233 #undef __FUNCT__ 234 #define __FUNCT__ "DMSetOptionsPrefix" 235 /*@C 236 DMSetOptionsPrefix - Sets the prefix used for searching for all 237 DMDA options in the database. 238 239 Logically Collective on DMDA 240 241 Input Parameter: 242 + da - the DMDA context 243 - prefix - the prefix to prepend to all option names 244 245 Notes: 246 A hyphen (-) must NOT be given at the beginning of the prefix name. 247 The first character of all runtime options is AUTOMATICALLY the hyphen. 248 249 Level: advanced 250 251 .keywords: DMDA, set, options, prefix, database 252 253 .seealso: DMSetFromOptions() 254 @*/ 255 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 256 { 257 PetscErrorCode ierr; 258 259 PetscFunctionBegin; 260 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 261 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 262 PetscFunctionReturn(0); 263 } 264 265 #undef __FUNCT__ 266 #define __FUNCT__ "DMDestroy" 267 /*@ 268 DMDestroy - Destroys a vector packer or DMDA. 269 270 Collective on DM 271 272 Input Parameter: 273 . 
 dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i]) {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* the solution vector may hold a composed reference back to this DM */
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* if references other than the circular ones remain, only drop the caller's
     reference and return without destroying anything */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* destroy cached local work vectors; a checked-out one is a caller bug */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* user context destructor, if one was supplied */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* type-specific teardown runs last, after all generic resources are gone */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* idempotent: once set up, subsequent calls are no-ops */
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.
-dm_mat_type <type> type of matrix to create inside DM 428 - -dm_coloring_type <global or ghosted> 429 430 Level: developer 431 432 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 433 434 @*/ 435 PetscErrorCode DMSetFromOptions(DM dm) 436 { 437 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 438 PetscErrorCode ierr; 439 char typeName[256] = MATAIJ; 440 441 PetscFunctionBegin; 442 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 443 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 444 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 445 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 446 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 447 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 448 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 449 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 450 if (flg) { 451 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 452 } 453 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 454 if (flg) { 455 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 456 } 457 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 458 if 
(dm->ops->setfromoptions) { 459 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 460 } 461 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 462 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 463 ierr = PetscOptionsEnd();CHKERRQ(ierr); 464 if (flg1) { 465 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 466 } 467 if (flg2) { 468 PetscViewer viewer; 469 470 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 471 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 472 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 473 ierr = DMView(dm, viewer);CHKERRQ(ierr); 474 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 475 } 476 if (flg3) { 477 PetscViewer viewer; 478 479 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 480 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 481 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 482 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 483 ierr = DMView(dm, viewer);CHKERRQ(ierr); 484 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 485 } 486 if (flg4) { 487 PetscViewer viewer; 488 489 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 490 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 491 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 492 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 493 ierr = DMView(dm, viewer);CHKERRQ(ierr); 494 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 495 } 496 PetscFunctionReturn(0); 497 } 498 499 #undef __FUNCT__ 500 #define __FUNCT__ "DMView" 501 /*@C 502 DMView - Views a vector packer or DMDA. 
503 504 Collective on DM 505 506 Input Parameter: 507 + dm - the DM object to view 508 - v - the viewer 509 510 Level: developer 511 512 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 513 514 @*/ 515 PetscErrorCode DMView(DM dm,PetscViewer v) 516 { 517 PetscErrorCode ierr; 518 519 PetscFunctionBegin; 520 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 521 if (!v) { 522 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 523 } 524 if (dm->ops->view) { 525 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 526 } 527 PetscFunctionReturn(0); 528 } 529 530 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer); 531 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer); 532 533 #undef __FUNCT__ 534 #define __FUNCT__ "DMCreateGlobalVector" 535 /*@ 536 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 537 538 Collective on DM 539 540 Input Parameter: 541 . dm - the DM object 542 543 Output Parameter: 544 . 
vec - the global vector 545 546 Level: beginner 547 548 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 549 550 @*/ 551 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 552 { 553 PetscErrorCode ierr; 554 555 PetscFunctionBegin; 556 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 557 if (dm->defaultSection) { 558 PetscSection gSection; 559 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 560 561 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 562 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 563 for(p = pStart; p < pEnd; ++p) { 564 PetscInt dof, cdof; 565 566 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 567 ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr); 568 if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof; 569 if ((dof > 0) && (dof-cdof != blockSize)) { 570 blockSize = 1; 571 break; 572 } 573 } 574 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 575 if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize); 576 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 577 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 578 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 579 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 580 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 581 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 582 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 583 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 584 /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 585 ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr); 586 } else { 587 ierr = 
(*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 588 } 589 PetscFunctionReturn(0); 590 } 591 592 #undef __FUNCT__ 593 #define __FUNCT__ "DMCreateLocalVector" 594 /*@ 595 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 596 597 Not Collective 598 599 Input Parameter: 600 . dm - the DM object 601 602 Output Parameter: 603 . vec - the local vector 604 605 Level: beginner 606 607 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 608 609 @*/ 610 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 611 { 612 PetscErrorCode ierr; 613 614 PetscFunctionBegin; 615 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 616 if (dm->defaultSection) { 617 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 618 619 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 620 for(p = pStart; p < pEnd; ++p) { 621 PetscInt dof; 622 623 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 624 if ((blockSize < 0) && (dof > 0)) blockSize = dof; 625 if ((dof > 0) && (dof != blockSize)) { 626 blockSize = 1; 627 break; 628 } 629 } 630 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 631 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 632 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 633 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 634 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 635 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 636 ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr); 637 } else { 638 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 639 } 640 PetscFunctionReturn(0); 641 } 642 643 #undef __FUNCT__ 644 #define __FUNCT__ "DMGetLocalToGlobalMapping" 645 /*@ 646 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 647 648 Collective on DM 649 650 Input Parameter: 651 . dm - the DM that provides the mapping 652 653 Output Parameter: 654 . 
ltog - the mapping 655 656 Level: intermediate 657 658 Notes: 659 This mapping can then be used by VecSetLocalToGlobalMapping() or 660 MatSetLocalToGlobalMapping(). 661 662 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 663 @*/ 664 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 665 { 666 PetscErrorCode ierr; 667 668 PetscFunctionBegin; 669 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 670 PetscValidPointer(ltog,2); 671 if (!dm->ltogmap) { 672 PetscSection section, sectionGlobal; 673 674 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 675 if (section) { 676 PetscInt *ltog; 677 PetscInt pStart, pEnd, size, p, l; 678 679 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 680 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 681 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 682 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 683 for (p = pStart, l = 0; p < pEnd; ++p) { 684 PetscInt dof, off, c; 685 686 /* Should probably use constrained dofs */ 687 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 688 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 689 for (c = 0; c < dof; ++c, ++l) { 690 ltog[l] = off+c; 691 } 692 } 693 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 694 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 695 } else { 696 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 697 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 698 } 699 } 700 *ltog = dm->ltogmap; 701 PetscFunctionReturn(0); 702 } 703 704 #undef __FUNCT__ 705 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 706 /*@ 707 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 
708 709 Collective on DM 710 711 Input Parameter: 712 . da - the distributed array that provides the mapping 713 714 Output Parameter: 715 . ltog - the block mapping 716 717 Level: intermediate 718 719 Notes: 720 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 721 MatSetLocalToGlobalMappingBlock(). 722 723 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 724 @*/ 725 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 726 { 727 PetscErrorCode ierr; 728 729 PetscFunctionBegin; 730 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 731 PetscValidPointer(ltog,2); 732 if (!dm->ltogmapb) { 733 PetscInt bs; 734 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 735 if (bs > 1) { 736 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 737 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 738 } else { 739 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 740 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 741 } 742 } 743 *ltog = dm->ltogmapb; 744 PetscFunctionReturn(0); 745 } 746 747 #undef __FUNCT__ 748 #define __FUNCT__ "DMGetBlockSize" 749 /*@ 750 DMGetBlockSize - Gets the inherent block size associated with a DM 751 752 Not Collective 753 754 Input Parameter: 755 . dm - the DM with block structure 756 757 Output Parameter: 758 . 
bs - the block size, 1 implies no exploitable block structure 759 760 Level: intermediate 761 762 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 763 @*/ 764 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 765 { 766 PetscFunctionBegin; 767 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 768 PetscValidPointer(bs,2); 769 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 770 *bs = dm->bs; 771 PetscFunctionReturn(0); 772 } 773 774 #undef __FUNCT__ 775 #define __FUNCT__ "DMCreateInterpolation" 776 /*@ 777 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 778 779 Collective on DM 780 781 Input Parameter: 782 + dm1 - the DM object 783 - dm2 - the second, finer DM object 784 785 Output Parameter: 786 + mat - the interpolation 787 - vec - the scaling (optional) 788 789 Level: developer 790 791 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 792 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 793 794 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 795 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
796 797 798 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 799 800 @*/ 801 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 802 { 803 PetscErrorCode ierr; 804 805 PetscFunctionBegin; 806 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 807 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 808 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 809 PetscFunctionReturn(0); 810 } 811 812 #undef __FUNCT__ 813 #define __FUNCT__ "DMCreateInjection" 814 /*@ 815 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 816 817 Collective on DM 818 819 Input Parameter: 820 + dm1 - the DM object 821 - dm2 - the second, finer DM object 822 823 Output Parameter: 824 . ctx - the injection 825 826 Level: developer 827 828 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 829 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 830 831 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 832 833 @*/ 834 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 835 { 836 PetscErrorCode ierr; 837 838 PetscFunctionBegin; 839 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 840 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 841 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 842 PetscFunctionReturn(0); 843 } 844 845 #undef __FUNCT__ 846 #define __FUNCT__ "DMCreateColoring" 847 /*@C 848 DMCreateColoring - Gets coloring for a DMDA or DMComposite 849 850 Collective on DM 851 852 Input Parameter: 853 + dm - the DM object 854 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 855 - matype - either MATAIJ or MATBAIJ 856 857 Output Parameter: 858 . 
coloring - the coloring 859 860 Level: developer 861 862 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 863 864 @*/ 865 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 866 { 867 PetscErrorCode ierr; 868 869 PetscFunctionBegin; 870 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 871 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 872 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 873 PetscFunctionReturn(0); 874 } 875 876 #undef __FUNCT__ 877 #define __FUNCT__ "DMCreateMatrix" 878 /*@C 879 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 880 881 Collective on DM 882 883 Input Parameter: 884 + dm - the DM object 885 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 886 any type which inherits from one of these (such as MATAIJ) 887 888 Output Parameter: 889 . mat - the empty Jacobian 890 891 Level: beginner 892 893 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 894 do not need to do it yourself. 895 896 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 897 the nonzero pattern call DMDASetMatPreallocateOnly() 898 899 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 900 internally by PETSc. 901 902 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 903 the indices for the global numbering for DMDAs which is complicated. 
904 905 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 906 907 @*/ 908 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 909 { 910 PetscErrorCode ierr; 911 912 PetscFunctionBegin; 913 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 914 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 915 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 916 #endif 917 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 918 PetscValidPointer(mat,3); 919 if (dm->mattype) { 920 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 921 } else { 922 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 923 } 924 PetscFunctionReturn(0); 925 } 926 927 #undef __FUNCT__ 928 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 929 /*@ 930 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 931 preallocated but the nonzero structure and zero values will not be set. 932 933 Logically Collective on DMDA 934 935 Input Parameter: 936 + dm - the DM 937 - only - PETSC_TRUE if only want preallocation 938 939 Level: developer 940 .seealso DMCreateMatrix() 941 @*/ 942 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 943 { 944 PetscFunctionBegin; 945 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 946 dm->prealloc_only = only; 947 PetscFunctionReturn(0); 948 } 949 950 #undef __FUNCT__ 951 #define __FUNCT__ "DMGetWorkArray" 952 /*@C 953 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 954 955 Not Collective 956 957 Input Parameters: 958 + dm - the DM object 959 . count - The minium size 960 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 961 962 Output Parameter: 963 . 
array - the work array 964 965 Level: developer 966 967 .seealso DMDestroy(), DMCreate() 968 @*/ 969 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 970 { 971 PetscErrorCode ierr; 972 DMWorkLink link; 973 size_t size; 974 975 PetscFunctionBegin; 976 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 977 PetscValidPointer(mem,4); 978 if (dm->workin) { 979 link = dm->workin; 980 dm->workin = dm->workin->next; 981 } else { 982 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 983 } 984 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 985 if (size*count > link->bytes) { 986 ierr = PetscFree(link->mem);CHKERRQ(ierr); 987 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 988 link->bytes = size*count; 989 } 990 link->next = dm->workout; 991 dm->workout = link; 992 *(void**)mem = link->mem; 993 PetscFunctionReturn(0); 994 } 995 996 #undef __FUNCT__ 997 #define __FUNCT__ "DMRestoreWorkArray" 998 /*@C 999 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1000 1001 Not Collective 1002 1003 Input Parameters: 1004 + dm - the DM object 1005 . count - The minium size 1006 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1007 1008 Output Parameter: 1009 . 
array - the work array 1010 1011 Level: developer 1012 1013 .seealso DMDestroy(), DMCreate() 1014 @*/ 1015 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1016 { 1017 DMWorkLink *p,link; 1018 1019 PetscFunctionBegin; 1020 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1021 PetscValidPointer(mem,4); 1022 for (p=&dm->workout; (link=*p); p=&link->next) { 1023 if (link->mem == *(void**)mem) { 1024 *p = link->next; 1025 link->next = dm->workin; 1026 dm->workin = link; 1027 *(void**)mem = PETSC_NULL; 1028 PetscFunctionReturn(0); 1029 } 1030 } 1031 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1032 PetscFunctionReturn(0); 1033 } 1034 1035 #undef __FUNCT__ 1036 #define __FUNCT__ "DMSetNullSpaceConstructor" 1037 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1038 { 1039 PetscFunctionBegin; 1040 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1041 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1042 dm->nullspaceConstructors[field] = nullsp; 1043 PetscFunctionReturn(0); 1044 } 1045 1046 #undef __FUNCT__ 1047 #define __FUNCT__ "DMCreateFieldIS" 1048 /*@C 1049 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1050 1051 Not collective 1052 1053 Input Parameter: 1054 . dm - the DM object 1055 1056 Output Parameters: 1057 + numFields - The number of fields (or PETSC_NULL if not requested) 1058 . fieldNames - The name for each field (or PETSC_NULL if not requested) 1059 - fields - The global indices for each field (or PETSC_NULL if not requested) 1060 1061 Level: intermediate 1062 1063 Notes: 1064 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1065 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1066 PetscFree(). 1067 1068 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1069 @*/ 1070 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1071 { 1072 PetscSection section, sectionGlobal; 1073 PetscErrorCode ierr; 1074 1075 PetscFunctionBegin; 1076 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1077 if (numFields) { 1078 PetscValidPointer(numFields,2); 1079 *numFields = 0; 1080 } 1081 if (fieldNames) { 1082 PetscValidPointer(fieldNames,3); 1083 *fieldNames = PETSC_NULL; 1084 } 1085 if (fields) { 1086 PetscValidPointer(fields,4); 1087 *fields = PETSC_NULL; 1088 } 1089 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1090 if (section) { 1091 PetscInt *fieldSizes, **fieldIndices; 1092 PetscInt nF, f, pStart, pEnd, p; 1093 1094 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1095 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1096 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 1097 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1098 for (f = 0; f < nF; ++f) { 1099 fieldSizes[f] = 0; 1100 } 1101 for (p = pStart; p < pEnd; ++p) { 1102 PetscInt gdof; 1103 1104 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1105 if (gdof > 0) { 1106 for (f = 0; f < nF; ++f) { 1107 PetscInt fdof, fcdof; 1108 1109 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1110 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1111 fieldSizes[f] += fdof-fcdof; 1112 } 1113 } 1114 } 1115 for (f = 0; f < nF; ++f) { 1116 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1117 fieldSizes[f] = 0; 1118 } 1119 for (p = pStart; p < pEnd; ++p) { 1120 PetscInt 
gdof, goff; 1121 1122 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1123 if (gdof > 0) { 1124 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1125 for (f = 0; f < nF; ++f) { 1126 PetscInt fdof, fcdof, fc; 1127 1128 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1129 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1130 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1131 fieldIndices[f][fieldSizes[f]] = goff++; 1132 } 1133 } 1134 } 1135 } 1136 if (numFields) {*numFields = nF;} 1137 if (fieldNames) { 1138 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 1139 for (f = 0; f < nF; ++f) { 1140 const char *fieldName; 1141 1142 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1143 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 1144 } 1145 } 1146 if (fields) { 1147 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1148 for (f = 0; f < nF; ++f) { 1149 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1150 } 1151 } 1152 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1153 } else { 1154 if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1155 } 1156 PetscFunctionReturn(0); 1157 } 1158 1159 #undef __FUNCT__ 1160 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1161 /*@C 1162 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1163 1164 Not Collective 1165 1166 Input Parameters: 1167 + dm - the DM object 1168 - name - the name of the field decomposition 1169 1170 Output Parameter: 1171 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1172 1173 Level: advanced 1174 1175 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1176 @*/ 1177 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1178 { 1179 PetscErrorCode ierr; 1180 1181 PetscFunctionBegin; 1182 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1183 PetscValidCharPointer(name,2); 1184 PetscValidPointer(ddm,3); 1185 *ddm = PETSC_NULL; 1186 if (dm->ops->createfielddecompositiondm) { 1187 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1188 } 1189 PetscFunctionReturn(0); 1190 } 1191 1192 #undef __FUNCT__ 1193 #define __FUNCT__ "DMCreateFieldDecomposition" 1194 /*@C 1195 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1196 corresponding to different fields: each IS contains the global indices of the dofs of the 1197 corresponding field. The optional list of DMs define the DM for each subproblem. 1198 Generalizes DMCreateFieldIS(). 1199 1200 Not collective 1201 1202 Input Parameter: 1203 . dm - the DM object 1204 1205 Output Parameters: 1206 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1207 . namelist - The name for each field (or PETSC_NULL if not requested) 1208 . islist - The global indices for each field (or PETSC_NULL if not requested) 1209 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1210 1211 Level: intermediate 1212 1213 Notes: 1214 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1215 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1216 and all of the arrays should be freed with PetscFree(). 
1217 1218 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1219 @*/ 1220 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1221 { 1222 PetscErrorCode ierr; 1223 1224 PetscFunctionBegin; 1225 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1226 if (len) {PetscValidPointer(len,2); *len = 0;} 1227 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1228 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1229 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1230 if (!dm->ops->createfielddecomposition) { 1231 PetscSection section; 1232 PetscInt numFields, f; 1233 1234 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1235 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1236 if (section && numFields && dm->ops->createsubdm) { 1237 *len = numFields; 1238 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1239 for (f = 0; f < numFields; ++f) { 1240 const char *fieldName; 1241 1242 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1243 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1244 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1245 } 1246 } else { 1247 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1248 /* By default there are no DMs associated with subproblems. */ 1249 if (dmlist) *dmlist = PETSC_NULL; 1250 } 1251 } 1252 else { 1253 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1254 } 1255 PetscFunctionReturn(0); 1256 } 1257 1258 #undef __FUNCT__ 1259 #define __FUNCT__ "DMCreateSubDM" 1260 /*@C 1261 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1262 The fields are defined by DMCreateFieldIS(). 
1263 1264 Not collective 1265 1266 Input Parameters: 1267 + dm - the DM object 1268 . numFields - number of fields in this subproblem 1269 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1270 1271 Output Parameters: 1272 . is - The global indices for the subproblem 1273 - dm - The DM for the subproblem 1274 1275 Level: intermediate 1276 1277 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1278 @*/ 1279 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1280 { 1281 PetscErrorCode ierr; 1282 1283 PetscFunctionBegin; 1284 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1285 PetscValidPointer(fields,3); 1286 if (is) {PetscValidPointer(is,4);} 1287 if (subdm) {PetscValidPointer(subdm,5);} 1288 if (dm->ops->createsubdm) { 1289 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1290 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1291 PetscFunctionReturn(0); 1292 } 1293 1294 #undef __FUNCT__ 1295 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1296 /*@C 1297 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1298 1299 Not Collective 1300 1301 Input Parameters: 1302 + dm - the DM object 1303 - name - the name of the subdomain decomposition 1304 1305 Output Parameter: 1306 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1307 1308 Level: advanced 1309 1310 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1311 @*/ 1312 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1313 { 1314 PetscErrorCode ierr; 1315 1316 PetscFunctionBegin; 1317 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1318 PetscValidCharPointer(name,2); 1319 PetscValidPointer(ddm,3); 1320 *ddm = PETSC_NULL; 1321 if (dm->ops->createdomaindecompositiondm) { 1322 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1323 } 1324 PetscFunctionReturn(0); 1325 } 1326 1327 #undef __FUNCT__ 1328 #define __FUNCT__ "DMCreateDomainDecomposition" 1329 /*@C 1330 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1331 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1332 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1333 define a nonoverlapping covering, while outer subdomains can overlap. 1334 The optional list of DMs define the DM for each subproblem. 1335 1336 Not collective 1337 1338 Input Parameter: 1339 . dm - the DM object 1340 1341 Output Parameters: 1342 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1343 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1344 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1345 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1346 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1347 1348 Level: intermediate 1349 1350 Notes: 1351 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1352 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1353 and all of the arrays should be freed with PetscFree(). 1354 1355 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1356 @*/ 1357 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1358 { 1359 PetscErrorCode ierr; 1360 1361 PetscFunctionBegin; 1362 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1363 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1364 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1365 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1366 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1367 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1368 if (dm->ops->createdomaindecomposition) { 1369 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1370 } 1371 PetscFunctionReturn(0); 1372 } 1373 1374 #undef __FUNCT__ 1375 #define __FUNCT__ "DMRefine" 1376 /*@ 1377 DMRefine - Refines a DM object 1378 1379 Collective on DM 1380 1381 Input Parameter: 1382 + dm - the DM object 1383 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1384 1385 Output Parameter: 1386 . 
dmf - the refined DM, or PETSC_NULL 1387 1388 Note: If no refinement was done, the return value is PETSC_NULL 1389 1390 Level: developer 1391 1392 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1393 @*/ 1394 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1395 { 1396 PetscErrorCode ierr; 1397 DMRefineHookLink link; 1398 1399 PetscFunctionBegin; 1400 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1401 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1402 if (*dmf) { 1403 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1404 (*dmf)->ops->initialguess = dm->ops->initialguess; 1405 (*dmf)->ops->function = dm->ops->function; 1406 (*dmf)->ops->functionj = dm->ops->functionj; 1407 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1408 (*dmf)->ops->jacobian = dm->ops->jacobian; 1409 } 1410 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1411 (*dmf)->ctx = dm->ctx; 1412 (*dmf)->leveldown = dm->leveldown; 1413 (*dmf)->levelup = dm->levelup + 1; 1414 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1415 for (link=dm->refinehook; link; link=link->next) { 1416 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1417 } 1418 } 1419 PetscFunctionReturn(0); 1420 } 1421 1422 #undef __FUNCT__ 1423 #define __FUNCT__ "DMRefineHookAdd" 1424 /*@ 1425 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1426 1427 Logically Collective 1428 1429 Input Arguments: 1430 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1431 . refinehook - function to run when setting up a coarser level 1432 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1433 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1434 1435 Calling sequence of refinehook: 1436 $ refinehook(DM coarse,DM fine,void *ctx); 1437 1438 + coarse - coarse level DM 1439 . fine - fine level DM to interpolate problem to 1440 - ctx - optional user-defined function context 1441 1442 Calling sequence for interphook: 1443 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1444 1445 + coarse - coarse level DM 1446 . interp - matrix interpolating a coarse-level solution to the finer grid 1447 . fine - fine level DM to update 1448 - ctx - optional user-defined function context 1449 1450 Level: advanced 1451 1452 Notes: 1453 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1454 1455 If this function is called multiple times, the hooks will be run in the order they are added. 1456 1457 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1458 @*/ 1459 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1460 { 1461 PetscErrorCode ierr; 1462 DMRefineHookLink link,*p; 1463 1464 PetscFunctionBegin; 1465 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1466 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1467 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1468 link->refinehook = refinehook; 1469 link->interphook = interphook; 1470 link->ctx = ctx; 1471 link->next = PETSC_NULL; 1472 *p = link; 1473 PetscFunctionReturn(0); 1474 } 1475 1476 #undef __FUNCT__ 1477 #define __FUNCT__ "DMInterpolate" 1478 /*@ 1479 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1480 1481 Collective if any 
hooks are 1482 1483 Input Arguments: 1484 + coarse - coarser DM to use as a base 1485 . restrct - interpolation matrix, apply using MatInterpolate() 1486 - fine - finer DM to update 1487 1488 Level: developer 1489 1490 .seealso: DMRefineHookAdd(), MatInterpolate() 1491 @*/ 1492 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1493 { 1494 PetscErrorCode ierr; 1495 DMRefineHookLink link; 1496 1497 PetscFunctionBegin; 1498 for (link=fine->refinehook; link; link=link->next) { 1499 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1500 } 1501 PetscFunctionReturn(0); 1502 } 1503 1504 #undef __FUNCT__ 1505 #define __FUNCT__ "DMGetRefineLevel" 1506 /*@ 1507 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1508 1509 Not Collective 1510 1511 Input Parameter: 1512 . dm - the DM object 1513 1514 Output Parameter: 1515 . level - number of refinements 1516 1517 Level: developer 1518 1519 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1520 1521 @*/ 1522 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1523 { 1524 PetscFunctionBegin; 1525 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1526 *level = dm->levelup; 1527 PetscFunctionReturn(0); 1528 } 1529 1530 #undef __FUNCT__ 1531 #define __FUNCT__ "DMGlobalToLocalBegin" 1532 /*@ 1533 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1534 1535 Neighbor-wise Collective on DM 1536 1537 Input Parameters: 1538 + dm - the DM object 1539 . g - the global vector 1540 . 
mode - INSERT_VALUES or ADD_VALUES 1541 - l - the local vector 1542 1543 1544 Level: beginner 1545 1546 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1547 1548 @*/ 1549 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1550 { 1551 PetscSF sf; 1552 PetscErrorCode ierr; 1553 1554 PetscFunctionBegin; 1555 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1556 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1557 if (sf) { 1558 PetscScalar *lArray, *gArray; 1559 1560 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1561 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1562 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1563 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1564 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1565 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1566 } else { 1567 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1568 } 1569 PetscFunctionReturn(0); 1570 } 1571 1572 #undef __FUNCT__ 1573 #define __FUNCT__ "DMGlobalToLocalEnd" 1574 /*@ 1575 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1576 1577 Neighbor-wise Collective on DM 1578 1579 Input Parameters: 1580 + dm - the DM object 1581 . g - the global vector 1582 . 
mode - INSERT_VALUES or ADD_VALUES 1583 - l - the local vector 1584 1585 1586 Level: beginner 1587 1588 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1589 1590 @*/ 1591 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1592 { 1593 PetscSF sf; 1594 PetscErrorCode ierr; 1595 PetscScalar *lArray, *gArray; 1596 1597 PetscFunctionBegin; 1598 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1599 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1600 if (sf) { 1601 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1602 1603 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1604 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1605 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1606 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1607 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1608 } else { 1609 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1610 } 1611 PetscFunctionReturn(0); 1612 } 1613 1614 #undef __FUNCT__ 1615 #define __FUNCT__ "DMLocalToGlobalBegin" 1616 /*@ 1617 DMLocalToGlobalBegin - updates global vectors from local vectors 1618 1619 Neighbor-wise Collective on DM 1620 1621 Input Parameters: 1622 + dm - the DM object 1623 . l - the local vector 1624 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1625 base point. 1626 - - the global vector 1627 1628 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1629 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1630 global array to the final global array with VecAXPY(). 1631 1632 Level: beginner 1633 1634 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1635 1636 @*/ 1637 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1638 { 1639 PetscSF sf; 1640 PetscErrorCode ierr; 1641 1642 PetscFunctionBegin; 1643 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1644 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1645 if (sf) { 1646 MPI_Op op; 1647 PetscScalar *lArray, *gArray; 1648 1649 switch(mode) { 1650 case INSERT_VALUES: 1651 case INSERT_ALL_VALUES: 1652 #if defined(PETSC_HAVE_MPI_REPLACE) 1653 op = MPI_REPLACE; break; 1654 #else 1655 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1656 #endif 1657 case ADD_VALUES: 1658 case ADD_ALL_VALUES: 1659 op = MPI_SUM; break; 1660 default: 1661 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1662 } 1663 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1664 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1665 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1666 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1667 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1668 } else { 1669 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1670 } 1671 PetscFunctionReturn(0); 1672 } 1673 1674 #undef __FUNCT__ 1675 #define __FUNCT__ "DMLocalToGlobalEnd" 1676 /*@ 1677 DMLocalToGlobalEnd - updates global vectors from local vectors 1678 1679 Neighbor-wise Collective on DM 1680 1681 Input Parameters: 1682 + dm - the DM object 1683 . l - the local vector 1684 . mode - INSERT_VALUES or ADD_VALUES 1685 - g - the global vector 1686 1687 1688 Level: beginner 1689 1690 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1691 1692 @*/ 1693 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1694 { 1695 PetscSF sf; 1696 PetscErrorCode ierr; 1697 1698 PetscFunctionBegin; 1699 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1700 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1701 if (sf) { 1702 MPI_Op op; 1703 PetscScalar *lArray, *gArray; 1704 1705 switch(mode) { 1706 case INSERT_VALUES: 1707 case INSERT_ALL_VALUES: 1708 #if defined(PETSC_HAVE_MPI_REPLACE) 1709 op = MPI_REPLACE; break; 1710 #else 1711 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1712 #endif 1713 case ADD_VALUES: 1714 case ADD_ALL_VALUES: 1715 op = MPI_SUM; break; 1716 default: 1717 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1718 } 1719 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1720 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1721 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1722 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1723 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1724 } else { 1725 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1726 } 1727 PetscFunctionReturn(0); 1728 } 1729 1730 #undef __FUNCT__ 1731 #define __FUNCT__ "DMComputeJacobianDefault" 1732 /*@ 1733 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1734 1735 Collective on DM 1736 1737 Input Parameter: 1738 + dm - the DM object 1739 . x - location to compute Jacobian at; may be ignored for linear problems 1740 . A - matrix that defines the operator for the linear solve 1741 - B - the matrix used to construct the preconditioner 1742 1743 Level: developer 1744 1745 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1746 DMSetFunction() 1747 1748 @*/ 1749 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1750 { 1751 PetscErrorCode ierr; 1752 1753 PetscFunctionBegin; 1754 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1755 *stflag = SAME_NONZERO_PATTERN; 1756 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1757 if (A != B) { 1758 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1759 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1760 } 1761 PetscFunctionReturn(0); 1762 } 1763 1764 #undef __FUNCT__ 1765 #define __FUNCT__ "DMCoarsen" 1766 /*@ 1767 DMCoarsen - Coarsens a DM object 1768 1769 Collective on DM 1770 1771 Input Parameter: 1772 + dm - the DM object 1773 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1774 1775 Output Parameter: 1776 . 
dmc - the coarsened DM 1777 1778 Level: developer 1779 1780 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1781 1782 @*/ 1783 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1784 { 1785 PetscErrorCode ierr; 1786 DMCoarsenHookLink link; 1787 1788 PetscFunctionBegin; 1789 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1790 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1791 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1792 (*dmc)->ops->initialguess = dm->ops->initialguess; 1793 (*dmc)->ops->function = dm->ops->function; 1794 (*dmc)->ops->functionj = dm->ops->functionj; 1795 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1796 (*dmc)->ops->jacobian = dm->ops->jacobian; 1797 } 1798 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1799 (*dmc)->ctx = dm->ctx; 1800 (*dmc)->levelup = dm->levelup; 1801 (*dmc)->leveldown = dm->leveldown + 1; 1802 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1803 for (link=dm->coarsenhook; link; link=link->next) { 1804 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1805 } 1806 PetscFunctionReturn(0); 1807 } 1808 1809 #undef __FUNCT__ 1810 #define __FUNCT__ "DMCoarsenHookAdd" 1811 /*@ 1812 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1813 1814 Logically Collective 1815 1816 Input Arguments: 1817 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1818 . coarsenhook - function to run when setting up a coarser level 1819 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1820 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1821 1822 Calling sequence of coarsenhook: 1823 $ coarsenhook(DM fine,DM coarse,void *ctx); 1824 1825 + fine - fine level DM 1826 . 
coarse - coarse level DM to restrict problem to 1827 - ctx - optional user-defined function context 1828 1829 Calling sequence for restricthook: 1830 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1831 1832 + fine - fine level DM 1833 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1834 . rscale - scaling vector for restriction 1835 . inject - matrix restricting by injection 1836 . coarse - coarse level DM to update 1837 - ctx - optional user-defined function context 1838 1839 Level: advanced 1840 1841 Notes: 1842 This function is only needed if auxiliary data needs to be set up on coarse grids. 1843 1844 If this function is called multiple times, the hooks will be run in the order they are added. 1845 1846 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1847 extract the finest level information from its context (instead of from the SNES). 1848 1849 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1850 @*/ 1851 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1852 { 1853 PetscErrorCode ierr; 1854 DMCoarsenHookLink link,*p; 1855 1856 PetscFunctionBegin; 1857 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1858 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1859 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1860 link->coarsenhook = coarsenhook; 1861 link->restricthook = restricthook; 1862 link->ctx = ctx; 1863 link->next = PETSC_NULL; 1864 *p = link; 1865 PetscFunctionReturn(0); 1866 } 1867 1868 #undef __FUNCT__ 1869 #define __FUNCT__ "DMRestrict" 1870 /*@ 1871 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1872 1873 Collective 
if any hooks are 1874 1875 Input Arguments: 1876 + fine - finer DM to use as a base 1877 . restrct - restriction matrix, apply using MatRestrict() 1878 . inject - injection matrix, also use MatRestrict() 1879 - coarse - coarer DM to update 1880 1881 Level: developer 1882 1883 .seealso: DMCoarsenHookAdd(), MatRestrict() 1884 @*/ 1885 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1886 { 1887 PetscErrorCode ierr; 1888 DMCoarsenHookLink link; 1889 1890 PetscFunctionBegin; 1891 for (link=fine->coarsenhook; link; link=link->next) { 1892 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1893 } 1894 PetscFunctionReturn(0); 1895 } 1896 1897 #undef __FUNCT__ 1898 #define __FUNCT__ "DMBlockRestrictHookAdd" 1899 /*@ 1900 DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1901 1902 Logically Collective 1903 1904 Input Arguments: 1905 + global - global DM 1906 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1907 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1908 1909 Calling sequence for restricthook: 1910 $ restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1911 1912 + global - global DM 1913 . out - scatter to the outer (with ghost and overlap points) block vector 1914 . in - scatter to block vector values only owned locally 1915 . block - block DM (may just be a shell if the global DM is passed in correctly) 1916 - ctx - optional user-defined function context 1917 1918 Level: advanced 1919 1920 Notes: 1921 This function is only needed if auxiliary data needs to be set up on coarse grids. 1922 1923 If this function is called multiple times, the hooks will be run in the order they are added. 
1924 1925 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1926 extract the finest level information from its context (instead of from the SNES). 1927 1928 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1929 @*/ 1930 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1931 { 1932 PetscErrorCode ierr; 1933 DMBlockRestrictHookLink link,*p; 1934 1935 PetscFunctionBegin; 1936 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1937 for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1938 ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr); 1939 link->restricthook = restricthook; 1940 link->ctx = ctx; 1941 link->next = PETSC_NULL; 1942 *p = link; 1943 PetscFunctionReturn(0); 1944 } 1945 1946 #undef __FUNCT__ 1947 #define __FUNCT__ "DMBlockRestrict" 1948 /*@ 1949 DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd() 1950 1951 Collective if any hooks are 1952 1953 Input Arguments: 1954 + fine - finer DM to use as a base 1955 . restrct - restriction matrix, apply using MatRestrict() 1956 . 
inject - injection matrix, also use MatRestrict() 1957 - coarse - coarer DM to update 1958 1959 Level: developer 1960 1961 .seealso: DMCoarsenHookAdd(), MatRestrict() 1962 @*/ 1963 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block) 1964 { 1965 PetscErrorCode ierr; 1966 DMBlockRestrictHookLink link; 1967 1968 PetscFunctionBegin; 1969 for (link=global->blockrestricthook; link; link=link->next) { 1970 if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);} 1971 } 1972 PetscFunctionReturn(0); 1973 } 1974 1975 #undef __FUNCT__ 1976 #define __FUNCT__ "DMGetCoarsenLevel" 1977 /*@ 1978 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1979 1980 Not Collective 1981 1982 Input Parameter: 1983 . dm - the DM object 1984 1985 Output Parameter: 1986 . level - number of coarsenings 1987 1988 Level: developer 1989 1990 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1991 1992 @*/ 1993 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1994 { 1995 PetscFunctionBegin; 1996 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1997 *level = dm->leveldown; 1998 PetscFunctionReturn(0); 1999 } 2000 2001 2002 2003 #undef __FUNCT__ 2004 #define __FUNCT__ "DMRefineHierarchy" 2005 /*@C 2006 DMRefineHierarchy - Refines a DM object, all levels at once 2007 2008 Collective on DM 2009 2010 Input Parameter: 2011 + dm - the DM object 2012 - nlevels - the number of levels of refinement 2013 2014 Output Parameter: 2015 . 
dmf - the refined DM hierarchy 2016 2017 Level: developer 2018 2019 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2020 2021 @*/ 2022 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2023 { 2024 PetscErrorCode ierr; 2025 2026 PetscFunctionBegin; 2027 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2028 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2029 if (nlevels == 0) PetscFunctionReturn(0); 2030 if (dm->ops->refinehierarchy) { 2031 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2032 } else if (dm->ops->refine) { 2033 PetscInt i; 2034 2035 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 2036 for (i=1; i<nlevels; i++) { 2037 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 2038 } 2039 } else { 2040 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2041 } 2042 PetscFunctionReturn(0); 2043 } 2044 2045 #undef __FUNCT__ 2046 #define __FUNCT__ "DMCoarsenHierarchy" 2047 /*@C 2048 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2049 2050 Collective on DM 2051 2052 Input Parameter: 2053 + dm - the DM object 2054 - nlevels - the number of levels of coarsening 2055 2056 Output Parameter: 2057 . 
dmc - the coarsened DM hierarchy 2058 2059 Level: developer 2060 2061 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2062 2063 @*/ 2064 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2065 { 2066 PetscErrorCode ierr; 2067 2068 PetscFunctionBegin; 2069 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2070 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2071 if (nlevels == 0) PetscFunctionReturn(0); 2072 PetscValidPointer(dmc,3); 2073 if (dm->ops->coarsenhierarchy) { 2074 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2075 } else if (dm->ops->coarsen) { 2076 PetscInt i; 2077 2078 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 2079 for (i=1; i<nlevels; i++) { 2080 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 2081 } 2082 } else { 2083 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2084 } 2085 PetscFunctionReturn(0); 2086 } 2087 2088 #undef __FUNCT__ 2089 #define __FUNCT__ "DMCreateAggregates" 2090 /*@ 2091 DMCreateAggregates - Gets the aggregates that map between 2092 grids associated with two DMs. 2093 2094 Collective on DM 2095 2096 Input Parameters: 2097 + dmc - the coarse grid DM 2098 - dmf - the fine grid DM 2099 2100 Output Parameters: 2101 . 
rest - the restriction matrix (transpose of the projection matrix) 2102 2103 Level: intermediate 2104 2105 .keywords: interpolation, restriction, multigrid 2106 2107 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2108 @*/ 2109 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2110 { 2111 PetscErrorCode ierr; 2112 2113 PetscFunctionBegin; 2114 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2115 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2116 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2117 PetscFunctionReturn(0); 2118 } 2119 2120 #undef __FUNCT__ 2121 #define __FUNCT__ "DMSetApplicationContextDestroy" 2122 /*@C 2123 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2124 2125 Not Collective 2126 2127 Input Parameters: 2128 + dm - the DM object 2129 - destroy - the destroy function 2130 2131 Level: intermediate 2132 2133 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2134 2135 @*/ 2136 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2137 { 2138 PetscFunctionBegin; 2139 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2140 dm->ctxdestroy = destroy; 2141 PetscFunctionReturn(0); 2142 } 2143 2144 #undef __FUNCT__ 2145 #define __FUNCT__ "DMSetApplicationContext" 2146 /*@ 2147 DMSetApplicationContext - Set a user context into a DM object 2148 2149 Not Collective 2150 2151 Input Parameters: 2152 + dm - the DM object 2153 - ctx - the user context 2154 2155 Level: intermediate 2156 2157 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2158 2159 @*/ 2160 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2161 { 2162 PetscFunctionBegin; 2163 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2164 dm->ctx = ctx; 2165 PetscFunctionReturn(0); 2166 } 2167 
2168 #undef __FUNCT__ 2169 #define __FUNCT__ "DMGetApplicationContext" 2170 /*@ 2171 DMGetApplicationContext - Gets a user context from a DM object 2172 2173 Not Collective 2174 2175 Input Parameter: 2176 . dm - the DM object 2177 2178 Output Parameter: 2179 . ctx - the user context 2180 2181 Level: intermediate 2182 2183 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2184 2185 @*/ 2186 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2187 { 2188 PetscFunctionBegin; 2189 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2190 *(void**)ctx = dm->ctx; 2191 PetscFunctionReturn(0); 2192 } 2193 2194 #undef __FUNCT__ 2195 #define __FUNCT__ "DMSetInitialGuess" 2196 /*@C 2197 DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers 2198 2199 Logically Collective on DM 2200 2201 Input Parameter: 2202 + dm - the DM object to destroy 2203 - f - the function to compute the initial guess 2204 2205 Level: intermediate 2206 2207 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2208 2209 @*/ 2210 PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec)) 2211 { 2212 PetscFunctionBegin; 2213 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2214 dm->ops->initialguess = f; 2215 PetscFunctionReturn(0); 2216 } 2217 2218 #undef __FUNCT__ 2219 #define __FUNCT__ "DMSetFunction" 2220 /*@C 2221 DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES 2222 2223 Logically Collective on DM 2224 2225 Input Parameter: 2226 + dm - the DM object 2227 - f - the function to compute (use PETSC_NULL to cancel a previous function that was set) 2228 2229 Level: intermediate 2230 2231 Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite 
differences if no Jacobian 2232 computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian. 2233 2234 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2235 DMSetJacobian() 2236 2237 @*/ 2238 PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2239 { 2240 PetscFunctionBegin; 2241 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2242 dm->ops->function = f; 2243 if (f) { 2244 dm->ops->functionj = f; 2245 } 2246 PetscFunctionReturn(0); 2247 } 2248 2249 #undef __FUNCT__ 2250 #define __FUNCT__ "DMSetJacobian" 2251 /*@C 2252 DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES 2253 2254 Logically Collective on DM 2255 2256 Input Parameter: 2257 + dm - the DM object to destroy 2258 - f - the function to compute the matrix entries 2259 2260 Level: intermediate 2261 2262 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2263 DMSetFunction() 2264 2265 @*/ 2266 PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*)) 2267 { 2268 PetscFunctionBegin; 2269 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2270 dm->ops->jacobian = f; 2271 PetscFunctionReturn(0); 2272 } 2273 2274 #undef __FUNCT__ 2275 #define __FUNCT__ "DMSetVariableBounds" 2276 /*@C 2277 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 
2278 2279 Logically Collective on DM 2280 2281 Input Parameter: 2282 + dm - the DM object 2283 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2284 2285 Level: intermediate 2286 2287 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2288 DMSetJacobian() 2289 2290 @*/ 2291 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2292 { 2293 PetscFunctionBegin; 2294 dm->ops->computevariablebounds = f; 2295 PetscFunctionReturn(0); 2296 } 2297 2298 #undef __FUNCT__ 2299 #define __FUNCT__ "DMHasVariableBounds" 2300 /*@ 2301 DMHasVariableBounds - does the DM object have a variable bounds function? 2302 2303 Not Collective 2304 2305 Input Parameter: 2306 . dm - the DM object to destroy 2307 2308 Output Parameter: 2309 . flg - PETSC_TRUE if the variable bounds function exists 2310 2311 Level: developer 2312 2313 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2314 2315 @*/ 2316 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2317 { 2318 PetscFunctionBegin; 2319 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2320 PetscFunctionReturn(0); 2321 } 2322 2323 #undef __FUNCT__ 2324 #define __FUNCT__ "DMComputeVariableBounds" 2325 /*@C 2326 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2327 2328 Logically Collective on DM 2329 2330 Input Parameters: 2331 + dm - the DM object to destroy 2332 - x - current solution at which the bounds are computed 2333 2334 Output parameters: 2335 + xl - lower bound 2336 - xu - upper bound 2337 2338 Level: intermediate 2339 2340 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2341 DMSetFunction(), DMSetVariableBounds() 2342 2343 @*/ 2344 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2345 { 2346 PetscErrorCode ierr; 2347 PetscFunctionBegin; 2348 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2349 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2350 if (dm->ops->computevariablebounds) { 2351 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2352 } 2353 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2354 PetscFunctionReturn(0); 2355 } 2356 2357 #undef __FUNCT__ 2358 #define __FUNCT__ "DMComputeInitialGuess" 2359 /*@ 2360 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2361 2362 Collective on DM 2363 2364 Input Parameter: 2365 + dm - the DM object to destroy 2366 - x - the vector to hold the initial guess values 2367 2368 Level: developer 2369 2370 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2371 2372 @*/ 2373 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2374 { 2375 PetscErrorCode ierr; 2376 PetscFunctionBegin; 2377 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2378 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2379 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2380 PetscFunctionReturn(0); 2381 } 2382 2383 #undef __FUNCT__ 2384 #define __FUNCT__ "DMHasInitialGuess" 2385 /*@ 2386 
DMHasInitialGuess - does the DM object have an initial guess function 2387 2388 Not Collective 2389 2390 Input Parameter: 2391 . dm - the DM object to destroy 2392 2393 Output Parameter: 2394 . flg - PETSC_TRUE if function exists 2395 2396 Level: developer 2397 2398 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2399 2400 @*/ 2401 PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg) 2402 { 2403 PetscFunctionBegin; 2404 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2405 *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE; 2406 PetscFunctionReturn(0); 2407 } 2408 2409 #undef __FUNCT__ 2410 #define __FUNCT__ "DMHasFunction" 2411 /*@ 2412 DMHasFunction - does the DM object have a function 2413 2414 Not Collective 2415 2416 Input Parameter: 2417 . dm - the DM object to destroy 2418 2419 Output Parameter: 2420 . flg - PETSC_TRUE if function exists 2421 2422 Level: developer 2423 2424 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2425 2426 @*/ 2427 PetscErrorCode DMHasFunction(DM dm,PetscBool *flg) 2428 { 2429 PetscFunctionBegin; 2430 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2431 *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE; 2432 PetscFunctionReturn(0); 2433 } 2434 2435 #undef __FUNCT__ 2436 #define __FUNCT__ "DMHasJacobian" 2437 /*@ 2438 DMHasJacobian - does the DM object have a matrix function 2439 2440 Not Collective 2441 2442 Input Parameter: 2443 . dm - the DM object to destroy 2444 2445 Output Parameter: 2446 . 
flg - PETSC_TRUE if function exists 2447 2448 Level: developer 2449 2450 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2451 2452 @*/ 2453 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2454 { 2455 PetscFunctionBegin; 2456 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2457 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2458 PetscFunctionReturn(0); 2459 } 2460 2461 #undef __FUNCT__ 2462 #define __FUNCT__ "DMHasColoring" 2463 /*@ 2464 DMHasColoring - does the DM object have a method of providing a coloring? 2465 2466 Not Collective 2467 2468 Input Parameter: 2469 . dm - the DM object 2470 2471 Output Parameter: 2472 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2473 2474 Level: developer 2475 2476 .seealso DMHasFunction(), DMCreateColoring() 2477 2478 @*/ 2479 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2480 { 2481 PetscFunctionBegin; 2482 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2483 PetscFunctionReturn(0); 2484 } 2485 2486 #undef __FUNCT__ 2487 #define __FUNCT__ "DMSetVec" 2488 /*@C 2489 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2490 2491 Collective on DM 2492 2493 Input Parameter: 2494 + dm - the DM object 2495 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2496 2497 Level: developer 2498 2499 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2500 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2501 2502 @*/ 2503 PetscErrorCode DMSetVec(DM dm,Vec x) 2504 { 2505 PetscErrorCode ierr; 2506 PetscFunctionBegin; 2507 if (x) { 2508 if (!dm->x) { 2509 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2510 } 2511 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2512 } 2513 else if (dm->x) { 2514 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2515 } 2516 PetscFunctionReturn(0); 2517 } 2518 2519 2520 #undef __FUNCT__ 2521 #define __FUNCT__ "DMComputeFunction" 2522 /*@ 2523 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2524 2525 Collective on DM 2526 2527 Input Parameter: 2528 + dm - the DM object to destroy 2529 . x - the location where the function is evaluationed, may be ignored for linear problems 2530 - b - the vector to hold the right hand side entries 2531 2532 Level: developer 2533 2534 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2535 DMSetJacobian() 2536 2537 @*/ 2538 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2539 { 2540 PetscErrorCode ierr; 2541 PetscFunctionBegin; 2542 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2543 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2544 PetscStackPush("DM user function"); 2545 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2546 PetscStackPop; 2547 PetscFunctionReturn(0); 2548 } 2549 2550 2551 2552 #undef __FUNCT__ 2553 #define __FUNCT__ "DMComputeJacobian" 2554 /*@ 2555 DMComputeJacobian - compute the matrix entries for the solver 2556 2557 Collective on DM 2558 2559 Input Parameter: 2560 + dm - the DM object 2561 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.  A - matrix that defines the operator for the linear solve
-  B - the matrix used to construct the preconditioner

   Output Parameter:
.  stflag - matrix structure flag returned by the Jacobian routine

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian was set: lazily build a finite-difference coloring Jacobian
       driven by the function stored in dm->ops->functionj (set by DMSetFunction()),
       then install DMComputeJacobianDefault so subsequent calls reuse it. */
    ISColoring    coloring;
    MatFDColoring fd;
    MatType       mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed -- NOTE(review): presumably balances a reference
       taken by MatFDColoringSetFunction() on dm; confirm against MatFDColoring docs */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  /* Fall back to the vector cached by DMSetVec()/a previous call when x is not supplied */
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  /* When the operator and preconditioner matrices differ, only B is guaranteed
     to have been assembled by the user routine; finish assembling A here */
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* Registry of available DM implementations, populated by DMRegisterAll() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2633 2634 Level: intermediate 2635 2636 .keywords: DM, set, type 2637 .seealso: DMGetType(), DMCreate() 2638 @*/ 2639 PetscErrorCode DMSetType(DM dm, DMType method) 2640 { 2641 PetscErrorCode (*r)(DM); 2642 PetscBool match; 2643 PetscErrorCode ierr; 2644 2645 PetscFunctionBegin; 2646 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2647 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2648 if (match) PetscFunctionReturn(0); 2649 2650 if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);} 2651 ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr); 2652 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2653 2654 if (dm->ops->destroy) { 2655 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2656 dm->ops->destroy = PETSC_NULL; 2657 } 2658 ierr = (*r)(dm);CHKERRQ(ierr); 2659 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2660 PetscFunctionReturn(0); 2661 } 2662 2663 #undef __FUNCT__ 2664 #define __FUNCT__ "DMGetType" 2665 /*@C 2666 DMGetType - Gets the DM type name (as a string) from the DM. 2667 2668 Not Collective 2669 2670 Input Parameter: 2671 . dm - The DM 2672 2673 Output Parameter: 2674 . type - The DM type name 2675 2676 Level: intermediate 2677 2678 .keywords: DM, get, type, name 2679 .seealso: DMSetType(), DMCreate() 2680 @*/ 2681 PetscErrorCode DMGetType(DM dm, DMType *type) 2682 { 2683 PetscErrorCode ierr; 2684 2685 PetscFunctionBegin; 2686 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2687 PetscValidCharPointer(type,2); 2688 if (!DMRegisterAllCalled) { 2689 ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr); 2690 } 2691 *type = ((PetscObject)dm)->type_name; 2692 PetscFunctionReturn(0); 2693 } 2694 2695 #undef __FUNCT__ 2696 #define __FUNCT__ "DMConvert" 2697 /*@C 2698 DMConvert - Converts a DM to another DM, either of the same or different type. 
2699 2700 Collective on DM 2701 2702 Input Parameters: 2703 + dm - the DM 2704 - newtype - new DM type (use "same" for the same type) 2705 2706 Output Parameter: 2707 . M - pointer to new DM 2708 2709 Notes: 2710 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2711 the MPI communicator of the generated DM is always the same as the communicator 2712 of the input DM. 2713 2714 Level: intermediate 2715 2716 .seealso: DMCreate() 2717 @*/ 2718 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2719 { 2720 DM B; 2721 char convname[256]; 2722 PetscBool sametype, issame; 2723 PetscErrorCode ierr; 2724 2725 PetscFunctionBegin; 2726 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2727 PetscValidType(dm,1); 2728 PetscValidPointer(M,3); 2729 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2730 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2731 { 2732 PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL; 2733 2734 /* 2735 Order of precedence: 2736 1) See if a specialized converter is known to the current DM. 2737 2) See if a specialized converter is known to the desired DM class. 2738 3) See if a good general converter is registered for the desired class 2739 4) See if a good general converter is known for the current matrix. 2740 5) Use a really basic converter. 2741 */ 2742 2743 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2744 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2745 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2746 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2747 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2748 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2749 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2750 if (conv) goto foundconv; 2751 2752 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2753 ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr); 2754 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2755 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2756 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2757 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2758 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2759 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2760 ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2761 if (conv) { 2762 ierr = DMDestroy(&B);CHKERRQ(ierr); 2763 goto foundconv; 2764 } 2765 2766 #if 0 2767 /* 3) See if a good general converter is registered for the desired class */ 2768 conv = B->ops->convertfrom; 2769 ierr = DMDestroy(&B);CHKERRQ(ierr); 2770 if (conv) goto foundconv; 2771 2772 /* 4) See if a good general converter is known for the current matrix */ 2773 if (dm->ops->convert) { 2774 conv = dm->ops->convert; 2775 } 2776 if (conv) goto foundconv; 2777 #endif 2778 2779 /* 5) Use a really basic converter. 
*/ 2780 SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2781 2782 foundconv: 2783 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2784 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2785 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2786 } 2787 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2788 PetscFunctionReturn(0); 2789 } 2790 2791 /*--------------------------------------------------------------------------------------------------------------------*/ 2792 2793 #undef __FUNCT__ 2794 #define __FUNCT__ "DMRegister" 2795 /*@C 2796 DMRegister - See DMRegisterDynamic() 2797 2798 Level: advanced 2799 @*/ 2800 PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM)) 2801 { 2802 char fullname[PETSC_MAX_PATH_LEN]; 2803 PetscErrorCode ierr; 2804 2805 PetscFunctionBegin; 2806 ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr); 2807 ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr); 2808 ierr = PetscStrcat(fullname, name);CHKERRQ(ierr); 2809 ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr); 2810 PetscFunctionReturn(0); 2811 } 2812 2813 2814 /*--------------------------------------------------------------------------------------------------------------------*/ 2815 #undef __FUNCT__ 2816 #define __FUNCT__ "DMRegisterDestroy" 2817 /*@C 2818 DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic(). 
   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow DMRegisterAll() to rebuild the registry on the next DMSetType() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* Per-DM context naming the MATLAB functions to call for function/Jacobian evaluation */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
    DMComputeFunction_Matlab - Calls the function that has been set with
                               DMSetFunctionMatlab().

    For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 1,nrhs = 4;
  mxArray         *plhs[1],*prhs[4];
  long long int   lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* Pass the PETSc object handles to MATLAB by copying the pointer bits into
     64-bit integers and shipping them as double scalars (assumes pointers fit
     in 64 bits and survive the round-trip through double) */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* NOTE(review): mxGetScalar() returns a double used here as an error code --
     assumes the MATLAB side returns an integer status; confirm */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine to a named MATLAB function

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
    DMComputeJacobian_Matlab - Calls the function that has been set with
                               DMSetJacobianMatlab().
2914 2915 For linear problems x is null 2916 2917 .seealso: DMSetFunction(), DMGetFunction() 2918 */ 2919 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2920 { 2921 PetscErrorCode ierr; 2922 DMMatlabContext *sctx; 2923 int nlhs = 2,nrhs = 5; 2924 mxArray *plhs[2],*prhs[5]; 2925 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2926 2927 PetscFunctionBegin; 2928 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2929 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2930 2931 /* call MATLAB function in ctx with arguments x, A, and B */ 2932 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2933 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2934 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2935 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2936 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2937 prhs[0] = mxCreateDoubleScalar((double)ls); 2938 prhs[1] = mxCreateDoubleScalar((double)lx); 2939 prhs[2] = mxCreateDoubleScalar((double)lA); 2940 prhs[3] = mxCreateDoubleScalar((double)lB); 2941 prhs[4] = mxCreateString(sctx->jacname); 2942 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2943 *str = (MatStructure) mxGetScalar(plhs[0]); 2944 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2945 mxDestroyArray(prhs[0]); 2946 mxDestroyArray(prhs[1]); 2947 mxDestroyArray(prhs[2]); 2948 mxDestroyArray(prhs[3]); 2949 mxDestroyArray(prhs[4]); 2950 mxDestroyArray(plhs[0]); 2951 mxDestroyArray(plhs[1]); 2952 PetscFunctionReturn(0); 2953 } 2954 2955 2956 #undef __FUNCT__ 2957 #define __FUNCT__ "DMSetJacobianMatlab" 2958 /* 2959 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2960 2961 */ 2962 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2963 { 2964 PetscErrorCode ierr; 2965 DMMatlabContext *sctx; 2966 2967 PetscFunctionBegin; 2968 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2969 /* currently sctx is memory bleed */ 2970 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2971 if (!sctx) { 2972 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2973 } 2974 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2975 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2976 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2977 PetscFunctionReturn(0); 2978 } 2979 #endif 2980 2981 #undef __FUNCT__ 2982 #define __FUNCT__ "DMLoad" 2983 /*@C 2984 DMLoad - Loads a DM that has been stored in binary or HDF5 format 2985 with DMView(). 2986 2987 Collective on PetscViewer 2988 2989 Input Parameters: 2990 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2991 some related function before a call to DMLoad(). 2992 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2993 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2994 2995 Level: intermediate 2996 2997 Notes: 2998 Defaults to the DM DA. 2999 3000 Notes for advanced users: 3001 Most users should not need to know the details of the binary storage 3002 format, since DMLoad() and DMView() completely hide these details. 3003 But for anyone who's interested, the standard binary matrix storage 3004 format is 3005 .vb 3006 has not yet been determined 3007 .ve 3008 3009 In addition, PETSc automatically does the byte swapping for 3010 machines that store the bytes reversed, e.g. DEC alpha, freebsd, 3011 linux, Windows and the paragon; thus if you write your own binary 3012 read/write routines you have to swap the bytes; see PetscBinaryRead() 3013 and PetscBinaryWrite() to see how this may be done. 
3014 3015 Concepts: vector^loading from file 3016 3017 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 3018 @*/ 3019 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 3020 { 3021 PetscErrorCode ierr; 3022 3023 PetscFunctionBegin; 3024 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 3025 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 3026 3027 if (!((PetscObject)newdm)->type_name) { 3028 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 3029 } 3030 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 3031 PetscFunctionReturn(0); 3032 } 3033 3034 /******************************** FEM Support **********************************/ 3035 3036 #undef __FUNCT__ 3037 #define __FUNCT__ "DMPrintCellVector" 3038 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 3039 PetscInt f; 3040 PetscErrorCode ierr; 3041 3042 PetscFunctionBegin; 3043 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3044 for (f = 0; f < len; ++f) { 3045 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 3046 } 3047 PetscFunctionReturn(0); 3048 } 3049 3050 #undef __FUNCT__ 3051 #define __FUNCT__ "DMPrintCellMatrix" 3052 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 3053 PetscInt f, g; 3054 PetscErrorCode ierr; 3055 3056 PetscFunctionBegin; 3057 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3058 for (f = 0; f < rows; ++f) { 3059 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 3060 for (g = 0; g < cols; ++g) { 3061 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 3062 } 3063 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 3064 } 3065 PetscFunctionReturn(0); 3066 } 3067 3068 #undef __FUNCT__ 3069 #define __FUNCT__ "DMGetLocalFunction" 3070 /*@C 3071 DMGetLocalFunction - Get the local residual 
function from this DM 3072 3073 Not collective 3074 3075 Input Parameter: 3076 . dm - The DM 3077 3078 Output Parameter: 3079 . lf - The local residual function 3080 3081 Calling sequence of lf: 3082 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3083 3084 + snes - the SNES context 3085 . x - local vector with the state at which to evaluate residual 3086 . f - local vector to put residual in 3087 - ctx - optional user-defined function context 3088 3089 Level: intermediate 3090 3091 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3092 @*/ 3093 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 3094 { 3095 PetscFunctionBegin; 3096 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3097 if (lf) *lf = dm->lf; 3098 PetscFunctionReturn(0); 3099 } 3100 3101 #undef __FUNCT__ 3102 #define __FUNCT__ "DMSetLocalFunction" 3103 /*@C 3104 DMSetLocalFunction - Set the local residual function from this DM 3105 3106 Not collective 3107 3108 Input Parameters: 3109 + dm - The DM 3110 - lf - The local residual function 3111 3112 Calling sequence of lf: 3113 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3114 3115 + snes - the SNES context 3116 . x - local vector with the state at which to evaluate residual 3117 . f - local vector to put residual in 3118 - ctx - optional user-defined function context 3119 3120 Level: intermediate 3121 3122 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3123 @*/ 3124 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 3125 { 3126 PetscFunctionBegin; 3127 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3128 dm->lf = lf; 3129 PetscFunctionReturn(0); 3130 } 3131 3132 #undef __FUNCT__ 3133 #define __FUNCT__ "DMGetLocalJacobian" 3134 /*@C 3135 DMGetLocalJacobian - Get the local Jacobian function from this DM 3136 3137 Not collective 3138 3139 Input Parameter: 3140 . dm - The DM 3141 3142 Output Parameter: 3143 . 
lj - The local Jacobian function 3144 3145 Calling sequence of lj: 3146 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3147 3148 + snes - the SNES context 3149 . x - local vector with the state at which to evaluate residual 3150 . J - matrix to put Jacobian in 3151 . M - matrix to use for defining Jacobian preconditioner 3152 - ctx - optional user-defined function context 3153 3154 Level: intermediate 3155 3156 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3157 @*/ 3158 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 3159 { 3160 PetscFunctionBegin; 3161 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3162 if (lj) *lj = dm->lj; 3163 PetscFunctionReturn(0); 3164 } 3165 3166 #undef __FUNCT__ 3167 #define __FUNCT__ "DMSetLocalJacobian" 3168 /*@C 3169 DMSetLocalJacobian - Set the local Jacobian function from this DM 3170 3171 Not collective 3172 3173 Input Parameters: 3174 + dm - The DM 3175 - lj - The local Jacobian function 3176 3177 Calling sequence of lj: 3178 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3179 3180 + snes - the SNES context 3181 . x - local vector with the state at which to evaluate residual 3182 . J - matrix to put Jacobian in 3183 . M - matrix to use for defining Jacobian preconditioner 3184 - ctx - optional user-defined function context 3185 3186 Level: intermediate 3187 3188 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3189 @*/ 3190 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 3191 { 3192 PetscFunctionBegin; 3193 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3194 dm->lj = lj; 3195 PetscFunctionReturn(0); 3196 } 3197 3198 #undef __FUNCT__ 3199 #define __FUNCT__ "DMGetDefaultSection" 3200 /*@ 3201 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 3202 3203 Input Parameter: 3204 . dm - The DM 3205 3206 Output Parameter: 3207 . 
section - The PetscSection 3208 3209 Level: intermediate 3210 3211 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3212 3213 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3214 @*/ 3215 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 3216 PetscFunctionBegin; 3217 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3218 PetscValidPointer(section, 2); 3219 *section = dm->defaultSection; 3220 PetscFunctionReturn(0); 3221 } 3222 3223 #undef __FUNCT__ 3224 #define __FUNCT__ "DMSetDefaultSection" 3225 /*@ 3226 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 3227 3228 Input Parameters: 3229 + dm - The DM 3230 - section - The PetscSection 3231 3232 Level: intermediate 3233 3234 Note: Any existing Section will be destroyed 3235 3236 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3237 @*/ 3238 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3239 PetscInt numFields; 3240 PetscInt f; 3241 PetscErrorCode ierr; 3242 3243 PetscFunctionBegin; 3244 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3245 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3246 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3247 dm->defaultSection = section; 3248 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3249 if (numFields) { 3250 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3251 for (f = 0; f < numFields; ++f) { 3252 const char *name; 3253 3254 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3255 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3256 } 3257 } 3258 PetscFunctionReturn(0); 3259 } 3260 3261 #undef __FUNCT__ 3262 #define __FUNCT__ "DMGetDefaultGlobalSection" 3263 /*@ 3264 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3265 3266 Input Parameter: 3267 . dm - The DM 3268 3269 Output Parameter: 3270 . 
section - The PetscSection 3271 3272 Level: intermediate 3273 3274 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3275 3276 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3277 @*/ 3278 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3279 PetscErrorCode ierr; 3280 3281 PetscFunctionBegin; 3282 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3283 PetscValidPointer(section, 2); 3284 if (!dm->defaultGlobalSection) { 3285 if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 3286 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 3287 } 3288 *section = dm->defaultGlobalSection; 3289 PetscFunctionReturn(0); 3290 } 3291 3292 #undef __FUNCT__ 3293 #define __FUNCT__ "DMSetDefaultGlobalSection" 3294 /*@ 3295 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 3296 3297 Input Parameters: 3298 + dm - The DM 3299 - section - The PetscSection 3300 3301 Level: intermediate 3302 3303 Note: Any existing Section will be destroyed 3304 3305 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 3306 @*/ 3307 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) { 3308 PetscErrorCode ierr; 3309 3310 PetscFunctionBegin; 3311 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3312 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3313 dm->defaultGlobalSection = section; 3314 PetscFunctionReturn(0); 3315 } 3316 3317 #undef __FUNCT__ 3318 #define __FUNCT__ "DMGetDefaultSF" 3319 /*@ 3320 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3321 it is created from the default PetscSection layouts in the DM. 3322 3323 Input Parameter: 3324 . dm - The DM 3325 3326 Output Parameter: 3327 . 
sf - The PetscSF 3328 3329 Level: intermediate 3330 3331 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3332 3333 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3334 @*/ 3335 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3336 PetscInt nroots; 3337 PetscErrorCode ierr; 3338 3339 PetscFunctionBegin; 3340 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3341 PetscValidPointer(sf, 2); 3342 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3343 if (nroots < 0) { 3344 PetscSection section, gSection; 3345 3346 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3347 if (section) { 3348 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3349 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3350 } else { 3351 *sf = PETSC_NULL; 3352 PetscFunctionReturn(0); 3353 } 3354 } 3355 *sf = dm->defaultSF; 3356 PetscFunctionReturn(0); 3357 } 3358 3359 #undef __FUNCT__ 3360 #define __FUNCT__ "DMSetDefaultSF" 3361 /*@ 3362 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3363 3364 Input Parameters: 3365 + dm - The DM 3366 - sf - The PetscSF 3367 3368 Level: intermediate 3369 3370 Note: Any previous SF is destroyed 3371 3372 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3373 @*/ 3374 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3375 PetscErrorCode ierr; 3376 3377 PetscFunctionBegin; 3378 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3379 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3380 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3381 dm->defaultSF = sf; 3382 PetscFunctionReturn(0); 3383 } 3384 3385 #undef __FUNCT__ 3386 #define __FUNCT__ "DMCreateDefaultSF" 3387 /*@C 3388 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3389 describing the data layout. 3390 3391 Input Parameters: 3392 + dm - The DM 3393 . 
localSection - PetscSection describing the local data layout 3394 - globalSection - PetscSection describing the global data layout 3395 3396 Level: intermediate 3397 3398 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3399 @*/ 3400 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3401 { 3402 MPI_Comm comm = ((PetscObject) dm)->comm; 3403 PetscLayout layout; 3404 const PetscInt *ranges; 3405 PetscInt *local; 3406 PetscSFNode *remote; 3407 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3408 PetscMPIInt size, rank; 3409 PetscErrorCode ierr; 3410 3411 PetscFunctionBegin; 3412 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3413 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3414 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3415 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3416 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3417 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3418 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3419 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3420 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3421 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3422 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3423 PetscInt gdof, gcdof; 3424 3425 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3426 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3427 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3428 } 3429 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3430 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3431 for (p = pStart, l = 0; p < pEnd; ++p) { 3432 const PetscInt *cind; 3433 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3434 3435 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3436 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3437 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3438 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3439 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3440 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3441 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3442 if (!gdof) continue; /* Censored point */ 3443 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3444 if (gsize != dof-cdof) { 3445 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3446 cdof = 0; /* Ignore constraints */ 3447 } 3448 for (d = 0, c = 0; d < dof; ++d) { 3449 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3450 local[l+d-c] = off+d; 3451 } 3452 if (gdof < 0) { 3453 for(d = 0; d < gsize; ++d, ++l) { 3454 PetscInt offset = -(goff+1) + d, r; 3455 3456 for (r = 0; r < size; ++r) { 3457 if ((offset >= ranges[r]) && (offset < ranges[r+1])) break; 3458 } 3459 remote[l].rank = r; 3460 remote[l].index = offset - ranges[r]; 3461 } 3462 } else { 3463 for(d = 0; d < gsize; ++d, ++l) { 3464 remote[l].rank = rank; 3465 remote[l].index = goff+d - ranges[rank]; 3466 } 3467 } 3468 } 3469 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3470 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3471 ierr = PetscSFSetGraph(dm->defaultSF, nroots, 
nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3472 PetscFunctionReturn(0); 3473 } 3474 3475 #undef __FUNCT__ 3476 #define __FUNCT__ "DMGetPointSF" 3477 /*@ 3478 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3479 3480 Input Parameter: 3481 . dm - The DM 3482 3483 Output Parameter: 3484 . sf - The PetscSF 3485 3486 Level: intermediate 3487 3488 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3489 3490 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3491 @*/ 3492 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3493 PetscFunctionBegin; 3494 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3495 PetscValidPointer(sf, 2); 3496 *sf = dm->sf; 3497 PetscFunctionReturn(0); 3498 } 3499 3500 #undef __FUNCT__ 3501 #define __FUNCT__ "DMSetPointSF" 3502 /*@ 3503 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3504 3505 Input Parameters: 3506 + dm - The DM 3507 - sf - The PetscSF 3508 3509 Level: intermediate 3510 3511 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3512 @*/ 3513 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) { 3514 PetscErrorCode ierr; 3515 3516 PetscFunctionBegin; 3517 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3518 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3519 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3520 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3521 dm->sf = sf; 3522 PetscFunctionReturn(0); 3523 } 3524 3525 #undef __FUNCT__ 3526 #define __FUNCT__ "DMGetNumFields" 3527 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3528 { 3529 PetscFunctionBegin; 3530 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3531 PetscValidPointer(numFields, 2); 3532 *numFields = dm->numFields; 3533 PetscFunctionReturn(0); 3534 } 3535 3536 #undef __FUNCT__ 3537 #define __FUNCT__ "DMSetNumFields" 3538 PetscErrorCode DMSetNumFields(DM dm, 
PetscInt numFields) 3539 { 3540 PetscInt f; 3541 PetscErrorCode ierr; 3542 3543 PetscFunctionBegin; 3544 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3545 for (f = 0; f < dm->numFields; ++f) { 3546 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3547 } 3548 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3549 dm->numFields = numFields; 3550 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3551 for (f = 0; f < dm->numFields; ++f) { 3552 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3553 } 3554 PetscFunctionReturn(0); 3555 } 3556 3557 #undef __FUNCT__ 3558 #define __FUNCT__ "DMGetField" 3559 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3560 { 3561 PetscFunctionBegin; 3562 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3563 PetscValidPointer(field, 2); 3564 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3565 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3566 *field = dm->fields[f]; 3567 PetscFunctionReturn(0); 3568 } 3569 3570 #undef __FUNCT__ 3571 #define __FUNCT__ "DMSetCoordinates" 3572 /*@ 3573 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3574 3575 Collective on DM 3576 3577 Input Parameters: 3578 + dm - the DM 3579 - c - coordinate vector 3580 3581 Note: 3582 The coordinates do include those for ghost points, which are in the local vector 3583 3584 Level: intermediate 3585 3586 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3587 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3588 @*/ 3589 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3590 { 3591 PetscErrorCode ierr; 3592 3593 PetscFunctionBegin; 3594 
PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3595 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3596 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3597 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3598 dm->coordinates = c; 3599 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3600 PetscFunctionReturn(0); 3601 } 3602 3603 #undef __FUNCT__ 3604 #define __FUNCT__ "DMSetCoordinatesLocal" 3605 /*@ 3606 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3607 3608 Collective on DM 3609 3610 Input Parameters: 3611 + dm - the DM 3612 - c - coordinate vector 3613 3614 Note: 3615 The coordinates of ghost points can be set using DMSetCoordinates() 3616 followed by DMGetCoordinatesLocal(). This is intended to enable the 3617 setting of ghost coordinates outside of the domain. 3618 3619 Level: intermediate 3620 3621 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3622 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3623 @*/ 3624 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3625 { 3626 PetscErrorCode ierr; 3627 3628 PetscFunctionBegin; 3629 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3630 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3631 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3632 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3633 dm->coordinatesLocal = c; 3634 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3635 PetscFunctionReturn(0); 3636 } 3637 3638 #undef __FUNCT__ 3639 #define __FUNCT__ "DMGetCoordinates" 3640 /*@ 3641 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3642 3643 Not Collective 3644 3645 Input Parameter: 3646 . dm - the DM 3647 3648 Output Parameter: 3649 . 
c - global coordinate vector 3650 3651 Note: 3652 This is a borrowed reference, so the user should NOT destroy this vector 3653 3654 Each process has only the local coordinates (does NOT have the ghost coordinates). 3655 3656 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3657 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3658 3659 Level: intermediate 3660 3661 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3662 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3663 @*/ 3664 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3665 { 3666 PetscErrorCode ierr; 3667 3668 PetscFunctionBegin; 3669 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3670 PetscValidPointer(c,2); 3671 if (!dm->coordinates && dm->coordinatesLocal) { 3672 DM cdm; 3673 3674 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3675 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3676 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3677 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3678 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3679 } 3680 *c = dm->coordinates; 3681 PetscFunctionReturn(0); 3682 } 3683 3684 #undef __FUNCT__ 3685 #define __FUNCT__ "DMGetCoordinatesLocal" 3686 /*@ 3687 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3688 3689 Collective on DM 3690 3691 Input Parameter: 3692 . dm - the DM 3693 3694 Output Parameter: 3695 . c - coordinate vector 3696 3697 Note: 3698 This is a borrowed reference, so the user should NOT destroy this vector 3699 3700 Each process has the local and ghost coordinates 3701 3702 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3703 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3704 3705 Level: intermediate 3706 3707 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3708 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3709 @*/ 3710 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3711 { 3712 PetscErrorCode ierr; 3713 3714 PetscFunctionBegin; 3715 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3716 PetscValidPointer(c,2); 3717 if (!dm->coordinatesLocal && dm->coordinates) { 3718 DM cdm; 3719 3720 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3721 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3722 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3723 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3724 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3725 } 3726 *c = dm->coordinatesLocal; 3727 PetscFunctionReturn(0); 3728 } 3729 3730 #undef __FUNCT__ 3731 #define __FUNCT__ "DMGetCoordinateDM" 3732 /*@ 3733 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3734 3735 Collective on DM 3736 3737 Input Parameter: 3738 . dm - the DM 3739 3740 Output Parameter: 3741 . 
cdm - coordinate DM 3742 3743 Level: intermediate 3744 3745 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3746 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3747 @*/ 3748 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3749 { 3750 PetscErrorCode ierr; 3751 3752 PetscFunctionBegin; 3753 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3754 PetscValidPointer(cdm,2); 3755 if (!dm->coordinateDM) { 3756 if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3757 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3758 } 3759 *cdm = dm->coordinateDM; 3760 PetscFunctionReturn(0); 3761 } 3762