#include <petscsnes.h>
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

    If you never call DMSetType() it will generate an
    error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* Without dynamic libraries the dependent packages must be registered explicitly;
     these calls are no-ops if the packages are already initialized. */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* The ops table stays zeroed until DMSetType() installs a concrete implementation
     (DMDA, DMComposite, ...); using the DM before that is an error. */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  v->lf           = PETSC_NULL;
  v->lj           = PETSC_NULL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* 10 matches the fixed capacity checked in DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective
on DMDA 74 75 Input Parameter: 76 + da - initial distributed array 77 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 78 79 Options Database: 80 . -dm_vec_type ctype 81 82 Level: intermediate 83 84 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 85 @*/ 86 PetscErrorCode DMSetVecType(DM da,VecType ctype) 87 { 88 PetscErrorCode ierr; 89 90 PetscFunctionBegin; 91 PetscValidHeaderSpecific(da,DM_CLASSID,1); 92 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 93 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 94 PetscFunctionReturn(0); 95 } 96 97 #undef __FUNCT__ 98 #define __FUNCT__ "VecGetDM" 99 /*@ 100 VecGetDM - Gets the DM defining the data layout of the vector 101 102 Not collective 103 104 Input Parameter: 105 . v - The Vec 106 107 Output Parameter: 108 . dm - The DM 109 110 Level: intermediate 111 112 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 113 @*/ 114 PetscErrorCode VecGetDM(Vec v, DM *dm) 115 { 116 PetscErrorCode ierr; 117 118 PetscFunctionBegin; 119 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 120 PetscValidPointer(dm,2); 121 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 122 PetscFunctionReturn(0); 123 } 124 125 #undef __FUNCT__ 126 #define __FUNCT__ "VecSetDM" 127 /*@ 128 VecSetDM - Sets the DM defining the data layout of the vector 129 130 Not collective 131 132 Input Parameters: 133 + v - The Vec 134 - dm - The DM 135 136 Level: intermediate 137 138 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 139 @*/ 140 PetscErrorCode VecSetDM(Vec v, DM dm) 141 { 142 PetscErrorCode ierr; 143 144 PetscFunctionBegin; 145 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 146 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 147 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 148 PetscFunctionReturn(0); 149 } 150 151 #undef __FUNCT__ 152 #define 
__FUNCT__ "DMSetMatType" 153 /*@C 154 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 155 156 Logically Collective on DM 157 158 Input Parameter: 159 + dm - the DM context 160 . ctype - the matrix type 161 162 Options Database: 163 . -dm_mat_type ctype 164 165 Level: intermediate 166 167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 168 @*/ 169 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 170 { 171 PetscErrorCode ierr; 172 PetscFunctionBegin; 173 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 174 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 175 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 176 PetscFunctionReturn(0); 177 } 178 179 #undef __FUNCT__ 180 #define __FUNCT__ "MatGetDM" 181 /*@ 182 MatGetDM - Gets the DM defining the data layout of the matrix 183 184 Not collective 185 186 Input Parameter: 187 . A - The Mat 188 189 Output Parameter: 190 . dm - The DM 191 192 Level: intermediate 193 194 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 195 @*/ 196 PetscErrorCode MatGetDM(Mat A, DM *dm) 197 { 198 PetscErrorCode ierr; 199 200 PetscFunctionBegin; 201 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 202 PetscValidPointer(dm,2); 203 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 204 PetscFunctionReturn(0); 205 } 206 207 #undef __FUNCT__ 208 #define __FUNCT__ "MatSetDM" 209 /*@ 210 MatSetDM - Sets the DM defining the data layout of the matrix 211 212 Not collective 213 214 Input Parameters: 215 + A - The Mat 216 - dm - The DM 217 218 Level: intermediate 219 220 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 221 @*/ 222 PetscErrorCode MatSetDM(Mat A, DM dm) 223 { 224 PetscErrorCode ierr; 225 226 PetscFunctionBegin; 227 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 228 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 229 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 230 
PetscFunctionReturn(0); 231 } 232 233 #undef __FUNCT__ 234 #define __FUNCT__ "DMSetOptionsPrefix" 235 /*@C 236 DMSetOptionsPrefix - Sets the prefix used for searching for all 237 DMDA options in the database. 238 239 Logically Collective on DMDA 240 241 Input Parameter: 242 + da - the DMDA context 243 - prefix - the prefix to prepend to all option names 244 245 Notes: 246 A hyphen (-) must NOT be given at the beginning of the prefix name. 247 The first character of all runtime options is AUTOMATICALLY the hyphen. 248 249 Level: advanced 250 251 .keywords: DMDA, set, options, prefix, database 252 253 .seealso: DMSetFromOptions() 254 @*/ 255 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 256 { 257 PetscErrorCode ierr; 258 259 PetscFunctionBegin; 260 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 261 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 262 PetscFunctionReturn(0); 263 } 264 265 #undef __FUNCT__ 266 #define __FUNCT__ "DMDestroy" 267 /*@ 268 DMDestroy - Destroys a vector packer or DMDA. 269 270 Collective on DM 271 272 Input Parameter: 273 . 
dm - the DM object to destroy 274 275 Level: developer 276 277 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 278 279 @*/ 280 PetscErrorCode DMDestroy(DM *dm) 281 { 282 PetscInt i, cnt = 0, f; 283 DMNamedVecLink nlink,nnext; 284 PetscErrorCode ierr; 285 286 PetscFunctionBegin; 287 if (!*dm) PetscFunctionReturn(0); 288 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 289 290 /* count all the circular references of DM and its contained Vecs */ 291 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 292 if ((*dm)->localin[i]) {cnt++;} 293 if ((*dm)->globalin[i]) {cnt++;} 294 } 295 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 296 if ((*dm)->x) { 297 DM obj; 298 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 299 if (obj == *dm) cnt++; 300 } 301 302 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 303 /* 304 Need this test because the dm references the vectors that 305 reference the dm, so destroying the dm calls destroy on the 306 vectors that cause another destroy on the dm 307 */ 308 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 309 ((PetscObject) (*dm))->refct = 0; 310 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 311 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 312 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 313 } 314 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 315 nnext = nlink->next; 316 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 317 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 318 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 319 ierr = PetscFree(nlink);CHKERRQ(ierr); 320 } 321 (*dm)->namedglobal = PETSC_NULL; 322 323 /* Destroy the list of hooks */ 324 { 325 DMCoarsenHookLink link,next; 326 for 
(link=(*dm)->coarsenhook; link; link=next) { 327 next = link->next; 328 ierr = PetscFree(link);CHKERRQ(ierr); 329 } 330 (*dm)->coarsenhook = PETSC_NULL; 331 } 332 { 333 DMRefineHookLink link,next; 334 for (link=(*dm)->refinehook; link; link=next) { 335 next = link->next; 336 ierr = PetscFree(link);CHKERRQ(ierr); 337 } 338 (*dm)->refinehook = PETSC_NULL; 339 } 340 /* Destroy the work arrays */ 341 { 342 DMWorkLink link,next; 343 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 344 for (link=(*dm)->workin; link; link=next) { 345 next = link->next; 346 ierr = PetscFree(link->mem);CHKERRQ(ierr); 347 ierr = PetscFree(link);CHKERRQ(ierr); 348 } 349 (*dm)->workin = PETSC_NULL; 350 } 351 352 if ((*dm)->ctx && (*dm)->ctxdestroy) { 353 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 354 } 355 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 356 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 357 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 358 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 359 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 360 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 361 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 362 363 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 364 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 365 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 366 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 367 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 368 369 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 370 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 371 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 372 373 for (f = 0; f < (*dm)->numFields; ++f) { 374 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 375 } 376 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 377 /* if memory was published with AMS then destroy it */ 378 
ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 379 380 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 381 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 382 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 383 PetscFunctionReturn(0); 384 } 385 386 #undef __FUNCT__ 387 #define __FUNCT__ "DMSetUp" 388 /*@ 389 DMSetUp - sets up the data structures inside a DM object 390 391 Collective on DM 392 393 Input Parameter: 394 . dm - the DM object to setup 395 396 Level: developer 397 398 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 399 400 @*/ 401 PetscErrorCode DMSetUp(DM dm) 402 { 403 PetscErrorCode ierr; 404 405 PetscFunctionBegin; 406 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 407 if (dm->setupcalled) PetscFunctionReturn(0); 408 if (dm->ops->setup) { 409 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 410 } 411 dm->setupcalled = PETSC_TRUE; 412 PetscFunctionReturn(0); 413 } 414 415 #undef __FUNCT__ 416 #define __FUNCT__ "DMSetFromOptions" 417 /*@ 418 DMSetFromOptions - sets parameters in a DM from the options database 419 420 Collective on DM 421 422 Input Parameter: 423 . dm - the DM object to set options for 424 425 Options Database: 426 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 427 . -dm_vec_type <type> type of vector to create inside DM 428 . 
 -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
  PetscErrorCode ierr;
  char           typeName[256] = MATAIJ;   /* default matrix type if none was set */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  /* Viewing flags; acted upon after PetscOptionsEnd() below */
  ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  /* typeName is reused: the current mattype (if any) is the displayed default */
  ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
  /* Give the concrete implementation (DMDA, DMComposite, ...) its turn */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  if (flg1) {
    ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  if (flg2) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (flg3) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (flg4) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.
    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt    classid = DM_FILE_CLASSID;
    MPI_Comm    comm;
    PetscMPIInt rank;
    char        type[256];

    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      /* Only rank 0 writes the header: classid followed by a fixed 256-char type name */
      ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* Delegate the actual viewing to the implementation, if it provides one */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.
vec - the global vector 562 563 Level: beginner 564 565 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 566 567 @*/ 568 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 569 { 570 PetscErrorCode ierr; 571 572 PetscFunctionBegin; 573 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 574 if (dm->defaultSection) { 575 PetscSection gSection; 576 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 577 578 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 579 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 580 for(p = pStart; p < pEnd; ++p) { 581 PetscInt dof, cdof; 582 583 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 584 ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr); 585 if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof; 586 if ((dof > 0) && (dof-cdof != blockSize)) { 587 blockSize = 1; 588 break; 589 } 590 } 591 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 592 if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize); 593 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 594 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 595 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 596 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 597 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 598 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 599 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 600 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 601 /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 602 ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr); 603 } else { 604 ierr = 
(*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 605 } 606 PetscFunctionReturn(0); 607 } 608 609 #undef __FUNCT__ 610 #define __FUNCT__ "DMCreateLocalVector" 611 /*@ 612 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 613 614 Not Collective 615 616 Input Parameter: 617 . dm - the DM object 618 619 Output Parameter: 620 . vec - the local vector 621 622 Level: beginner 623 624 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 625 626 @*/ 627 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 628 { 629 PetscErrorCode ierr; 630 631 PetscFunctionBegin; 632 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 633 if (dm->defaultSection) { 634 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 635 636 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 637 for(p = pStart; p < pEnd; ++p) { 638 PetscInt dof; 639 640 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 641 if ((blockSize < 0) && (dof > 0)) blockSize = dof; 642 if ((dof > 0) && (dof != blockSize)) { 643 blockSize = 1; 644 break; 645 } 646 } 647 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 648 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 649 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 650 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 651 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 652 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 653 ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr); 654 } else { 655 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 656 } 657 PetscFunctionReturn(0); 658 } 659 660 #undef __FUNCT__ 661 #define __FUNCT__ "DMGetLocalToGlobalMapping" 662 /*@ 663 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 664 665 Collective on DM 666 667 Input Parameter: 668 . dm - the DM that provides the mapping 669 670 Output Parameter: 671 . 
ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;   /* NOTE(review): shadows the output parameter intentionally */
      PetscInt  pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Built lazily on first request and cached in dm->ltogmapb */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the blocked mapping equals the point mapping, so share it
         and take an extra reference for the separate ltogmapb slot */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.
bs - the block size, 1 implies no exploitable block structure 776 777 Level: intermediate 778 779 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 780 @*/ 781 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 782 { 783 PetscFunctionBegin; 784 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 785 PetscValidPointer(bs,2); 786 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 787 *bs = dm->bs; 788 PetscFunctionReturn(0); 789 } 790 791 #undef __FUNCT__ 792 #define __FUNCT__ "DMCreateInterpolation" 793 /*@ 794 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 795 796 Collective on DM 797 798 Input Parameter: 799 + dm1 - the DM object 800 - dm2 - the second, finer DM object 801 802 Output Parameter: 803 + mat - the interpolation 804 - vec - the scaling (optional) 805 806 Level: developer 807 808 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 809 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 810 811 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 812 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
813 814 815 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 816 817 @*/ 818 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 819 { 820 PetscErrorCode ierr; 821 822 PetscFunctionBegin; 823 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 824 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 825 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 826 PetscFunctionReturn(0); 827 } 828 829 #undef __FUNCT__ 830 #define __FUNCT__ "DMCreateInjection" 831 /*@ 832 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 833 834 Collective on DM 835 836 Input Parameter: 837 + dm1 - the DM object 838 - dm2 - the second, finer DM object 839 840 Output Parameter: 841 . ctx - the injection 842 843 Level: developer 844 845 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 846 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 847 848 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 849 850 @*/ 851 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 852 { 853 PetscErrorCode ierr; 854 855 PetscFunctionBegin; 856 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 857 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 858 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 859 PetscFunctionReturn(0); 860 } 861 862 #undef __FUNCT__ 863 #define __FUNCT__ "DMCreateColoring" 864 /*@C 865 DMCreateColoring - Gets coloring for a DMDA or DMComposite 866 867 Collective on DM 868 869 Input Parameter: 870 + dm - the DM object 871 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 872 - matype - either MATAIJ or MATBAIJ 873 874 Output Parameter: 875 . 
coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Not every DM implementation can produce a coloring; fail cleanly if missing */
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix"
/*@C
    DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
            any type which inherits from one of these (such as MATAIJ)

    Output Parameter:
.   mat - the empty Jacobian

    Level: beginner

    Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
       do not need to do it yourself.

       By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
       the nonzero pattern call DMDASetMatPreallocateOnly()

       For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
       internally by PETSc.

       For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
       the indices for the global numbering for DMDAs which is complicated.
921 922 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 923 924 @*/ 925 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 926 { 927 PetscErrorCode ierr; 928 929 PetscFunctionBegin; 930 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 931 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 932 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 933 #endif 934 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 935 PetscValidPointer(mat,3); 936 if (dm->mattype) { 937 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 938 } else { 939 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 940 } 941 PetscFunctionReturn(0); 942 } 943 944 #undef __FUNCT__ 945 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 946 /*@ 947 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 948 preallocated but the nonzero structure and zero values will not be set. 949 950 Logically Collective on DMDA 951 952 Input Parameter: 953 + dm - the DM 954 - only - PETSC_TRUE if only want preallocation 955 956 Level: developer 957 .seealso DMCreateMatrix() 958 @*/ 959 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 960 { 961 PetscFunctionBegin; 962 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 963 dm->prealloc_only = only; 964 PetscFunctionReturn(0); 965 } 966 967 #undef __FUNCT__ 968 #define __FUNCT__ "DMGetWorkArray" 969 /*@C 970 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 971 972 Not Collective 973 974 Input Parameters: 975 + dm - the DM object 976 . count - The minium size 977 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 978 979 Output Parameter: 980 . 
array - the work array 981 982 Level: developer 983 984 .seealso DMDestroy(), DMCreate() 985 @*/ 986 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 987 { 988 PetscErrorCode ierr; 989 DMWorkLink link; 990 size_t size; 991 992 PetscFunctionBegin; 993 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 994 PetscValidPointer(mem,4); 995 if (dm->workin) { 996 link = dm->workin; 997 dm->workin = dm->workin->next; 998 } else { 999 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 1000 } 1001 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1002 if (size*count > link->bytes) { 1003 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1004 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1005 link->bytes = size*count; 1006 } 1007 link->next = dm->workout; 1008 dm->workout = link; 1009 *(void**)mem = link->mem; 1010 PetscFunctionReturn(0); 1011 } 1012 1013 #undef __FUNCT__ 1014 #define __FUNCT__ "DMRestoreWorkArray" 1015 /*@C 1016 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1017 1018 Not Collective 1019 1020 Input Parameters: 1021 + dm - the DM object 1022 . count - The minium size 1023 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1024 1025 Output Parameter: 1026 . 
array - the work array 1027 1028 Level: developer 1029 1030 .seealso DMDestroy(), DMCreate() 1031 @*/ 1032 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1033 { 1034 DMWorkLink *p,link; 1035 1036 PetscFunctionBegin; 1037 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1038 PetscValidPointer(mem,4); 1039 for (p=&dm->workout; (link=*p); p=&link->next) { 1040 if (link->mem == *(void**)mem) { 1041 *p = link->next; 1042 link->next = dm->workin; 1043 dm->workin = link; 1044 *(void**)mem = PETSC_NULL; 1045 PetscFunctionReturn(0); 1046 } 1047 } 1048 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1049 PetscFunctionReturn(0); 1050 } 1051 1052 #undef __FUNCT__ 1053 #define __FUNCT__ "DMSetNullSpaceConstructor" 1054 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1055 { 1056 PetscFunctionBegin; 1057 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1058 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1059 dm->nullspaceConstructors[field] = nullsp; 1060 PetscFunctionReturn(0); 1061 } 1062 1063 #undef __FUNCT__ 1064 #define __FUNCT__ "DMCreateFieldIS" 1065 /*@C 1066 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1067 1068 Not collective 1069 1070 Input Parameter: 1071 . dm - the DM object 1072 1073 Output Parameters: 1074 + numFields - The number of fields (or PETSC_NULL if not requested) 1075 . fieldNames - The name for each field (or PETSC_NULL if not requested) 1076 - fields - The global indices for each field (or PETSC_NULL if not requested) 1077 1078 Level: intermediate 1079 1080 Notes: 1081 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1082 PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with 1083 PetscFree(). 1084 1085 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 1086 @*/ 1087 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields) 1088 { 1089 PetscSection section, sectionGlobal; 1090 PetscErrorCode ierr; 1091 1092 PetscFunctionBegin; 1093 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1094 if (numFields) { 1095 PetscValidPointer(numFields,2); 1096 *numFields = 0; 1097 } 1098 if (fieldNames) { 1099 PetscValidPointer(fieldNames,3); 1100 *fieldNames = PETSC_NULL; 1101 } 1102 if (fields) { 1103 PetscValidPointer(fields,4); 1104 *fields = PETSC_NULL; 1105 } 1106 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1107 if (section) { 1108 PetscInt *fieldSizes, **fieldIndices; 1109 PetscInt nF, f, pStart, pEnd, p; 1110 1111 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 1112 ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr); 1113 ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr); 1114 ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr); 1115 for (f = 0; f < nF; ++f) { 1116 fieldSizes[f] = 0; 1117 } 1118 for (p = pStart; p < pEnd; ++p) { 1119 PetscInt gdof; 1120 1121 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1122 if (gdof > 0) { 1123 for (f = 0; f < nF; ++f) { 1124 PetscInt fdof, fcdof; 1125 1126 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1127 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1128 fieldSizes[f] += fdof-fcdof; 1129 } 1130 } 1131 } 1132 for (f = 0; f < nF; ++f) { 1133 ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr); 1134 fieldSizes[f] = 0; 1135 } 1136 for (p = pStart; p < pEnd; ++p) { 1137 PetscInt 
gdof, goff; 1138 1139 ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr); 1140 if (gdof > 0) { 1141 ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr); 1142 for (f = 0; f < nF; ++f) { 1143 PetscInt fdof, fcdof, fc; 1144 1145 ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr); 1146 ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr); 1147 for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) { 1148 fieldIndices[f][fieldSizes[f]] = goff++; 1149 } 1150 } 1151 } 1152 } 1153 if (numFields) {*numFields = nF;} 1154 if (fieldNames) { 1155 ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr); 1156 for (f = 0; f < nF; ++f) { 1157 const char *fieldName; 1158 1159 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1160 ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr); 1161 } 1162 } 1163 if (fields) { 1164 ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr); 1165 for (f = 0; f < nF; ++f) { 1166 ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr); 1167 } 1168 } 1169 ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr); 1170 } else { 1171 if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);} 1172 } 1173 PetscFunctionReturn(0); 1174 } 1175 1176 #undef __FUNCT__ 1177 #define __FUNCT__ "DMCreateFieldDecompositionDM" 1178 /*@C 1179 DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields. 1180 1181 Not Collective 1182 1183 Input Parameters: 1184 + dm - the DM object 1185 - name - the name of the field decomposition 1186 1187 Output Parameter: 1188 . 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1189 1190 Level: advanced 1191 1192 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1193 @*/ 1194 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1195 { 1196 PetscErrorCode ierr; 1197 1198 PetscFunctionBegin; 1199 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1200 PetscValidCharPointer(name,2); 1201 PetscValidPointer(ddm,3); 1202 *ddm = PETSC_NULL; 1203 if (dm->ops->createfielddecompositiondm) { 1204 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1205 } 1206 PetscFunctionReturn(0); 1207 } 1208 1209 #undef __FUNCT__ 1210 #define __FUNCT__ "DMCreateFieldDecomposition" 1211 /*@C 1212 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1213 corresponding to different fields: each IS contains the global indices of the dofs of the 1214 corresponding field. The optional list of DMs define the DM for each subproblem. 1215 Generalizes DMCreateFieldIS(). 1216 1217 Not collective 1218 1219 Input Parameter: 1220 . dm - the DM object 1221 1222 Output Parameters: 1223 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1224 . namelist - The name for each field (or PETSC_NULL if not requested) 1225 . islist - The global indices for each field (or PETSC_NULL if not requested) 1226 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1227 1228 Level: intermediate 1229 1230 Notes: 1231 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1232 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1233 and all of the arrays should be freed with PetscFree(). 
1234 1235 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1236 @*/ 1237 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1238 { 1239 PetscErrorCode ierr; 1240 1241 PetscFunctionBegin; 1242 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1243 if (len) {PetscValidPointer(len,2); *len = 0;} 1244 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1245 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1246 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1247 if (!dm->ops->createfielddecomposition) { 1248 PetscSection section; 1249 PetscInt numFields, f; 1250 1251 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1252 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1253 if (section && numFields && dm->ops->createsubdm) { 1254 *len = numFields; 1255 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1256 for (f = 0; f < numFields; ++f) { 1257 const char *fieldName; 1258 1259 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1260 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1261 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1262 } 1263 } else { 1264 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1265 /* By default there are no DMs associated with subproblems. */ 1266 if (dmlist) *dmlist = PETSC_NULL; 1267 } 1268 } 1269 else { 1270 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1271 } 1272 PetscFunctionReturn(0); 1273 } 1274 1275 #undef __FUNCT__ 1276 #define __FUNCT__ "DMCreateSubDM" 1277 /*@C 1278 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1279 The fields are defined by DMCreateFieldIS(). 
1280 1281 Not collective 1282 1283 Input Parameters: 1284 + dm - the DM object 1285 . numFields - number of fields in this subproblem 1286 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1287 1288 Output Parameters: 1289 . is - The global indices for the subproblem 1290 - dm - The DM for the subproblem 1291 1292 Level: intermediate 1293 1294 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1295 @*/ 1296 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1297 { 1298 PetscErrorCode ierr; 1299 1300 PetscFunctionBegin; 1301 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1302 PetscValidPointer(fields,3); 1303 if (is) {PetscValidPointer(is,4);} 1304 if (subdm) {PetscValidPointer(subdm,5);} 1305 if (dm->ops->createsubdm) { 1306 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1307 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1308 PetscFunctionReturn(0); 1309 } 1310 1311 #undef __FUNCT__ 1312 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1313 /*@C 1314 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1315 1316 Not Collective 1317 1318 Input Parameters: 1319 + dm - the DM object 1320 - name - the name of the subdomain decomposition 1321 1322 Output Parameter: 1323 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1324 1325 Level: advanced 1326 1327 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1328 @*/ 1329 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1330 { 1331 PetscErrorCode ierr; 1332 1333 PetscFunctionBegin; 1334 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1335 PetscValidCharPointer(name,2); 1336 PetscValidPointer(ddm,3); 1337 *ddm = PETSC_NULL; 1338 if (dm->ops->createdomaindecompositiondm) { 1339 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1340 } 1341 PetscFunctionReturn(0); 1342 } 1343 1344 #undef __FUNCT__ 1345 #define __FUNCT__ "DMCreateDomainDecomposition" 1346 /*@C 1347 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1348 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1349 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1350 define a nonoverlapping covering, while outer subdomains can overlap. 1351 The optional list of DMs define the DM for each subproblem. 1352 1353 Not collective 1354 1355 Input Parameter: 1356 . dm - the DM object 1357 1358 Output Parameters: 1359 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1360 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1361 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1362 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1363 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1364 1365 Level: intermediate 1366 1367 Notes: 1368 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1369 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1370 and all of the arrays should be freed with PetscFree(). 1371 1372 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1373 @*/ 1374 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1375 { 1376 PetscErrorCode ierr; 1377 1378 PetscFunctionBegin; 1379 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1380 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1381 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1382 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1383 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1384 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1385 if (dm->ops->createdomaindecomposition) { 1386 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1387 } 1388 PetscFunctionReturn(0); 1389 } 1390 1391 #undef __FUNCT__ 1392 #define __FUNCT__ "DMRefine" 1393 /*@ 1394 DMRefine - Refines a DM object 1395 1396 Collective on DM 1397 1398 Input Parameter: 1399 + dm - the DM object 1400 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1401 1402 Output Parameter: 1403 . 
dmf - the refined DM, or PETSC_NULL 1404 1405 Note: If no refinement was done, the return value is PETSC_NULL 1406 1407 Level: developer 1408 1409 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1410 @*/ 1411 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1412 { 1413 PetscErrorCode ierr; 1414 DMRefineHookLink link; 1415 1416 PetscFunctionBegin; 1417 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1418 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1419 if (*dmf) { 1420 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1421 (*dmf)->ops->initialguess = dm->ops->initialguess; 1422 (*dmf)->ops->function = dm->ops->function; 1423 (*dmf)->ops->functionj = dm->ops->functionj; 1424 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1425 (*dmf)->ops->jacobian = dm->ops->jacobian; 1426 } 1427 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1428 (*dmf)->ctx = dm->ctx; 1429 (*dmf)->leveldown = dm->leveldown; 1430 (*dmf)->levelup = dm->levelup + 1; 1431 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1432 for (link=dm->refinehook; link; link=link->next) { 1433 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1434 } 1435 } 1436 PetscFunctionReturn(0); 1437 } 1438 1439 #undef __FUNCT__ 1440 #define __FUNCT__ "DMRefineHookAdd" 1441 /*@ 1442 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1443 1444 Logically Collective 1445 1446 Input Arguments: 1447 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1448 . refinehook - function to run when setting up a coarser level 1449 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1450 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1451 1452 Calling sequence of refinehook: 1453 $ refinehook(DM coarse,DM fine,void *ctx); 1454 1455 + coarse - coarse level DM 1456 . fine - fine level DM to interpolate problem to 1457 - ctx - optional user-defined function context 1458 1459 Calling sequence for interphook: 1460 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1461 1462 + coarse - coarse level DM 1463 . interp - matrix interpolating a coarse-level solution to the finer grid 1464 . fine - fine level DM to update 1465 - ctx - optional user-defined function context 1466 1467 Level: advanced 1468 1469 Notes: 1470 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1471 1472 If this function is called multiple times, the hooks will be run in the order they are added. 1473 1474 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1475 @*/ 1476 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1477 { 1478 PetscErrorCode ierr; 1479 DMRefineHookLink link,*p; 1480 1481 PetscFunctionBegin; 1482 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1483 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1484 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1485 link->refinehook = refinehook; 1486 link->interphook = interphook; 1487 link->ctx = ctx; 1488 link->next = PETSC_NULL; 1489 *p = link; 1490 PetscFunctionReturn(0); 1491 } 1492 1493 #undef __FUNCT__ 1494 #define __FUNCT__ "DMInterpolate" 1495 /*@ 1496 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1497 1498 Collective if any 
hooks are 1499 1500 Input Arguments: 1501 + coarse - coarser DM to use as a base 1502 . restrct - interpolation matrix, apply using MatInterpolate() 1503 - fine - finer DM to update 1504 1505 Level: developer 1506 1507 .seealso: DMRefineHookAdd(), MatInterpolate() 1508 @*/ 1509 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1510 { 1511 PetscErrorCode ierr; 1512 DMRefineHookLink link; 1513 1514 PetscFunctionBegin; 1515 for (link=fine->refinehook; link; link=link->next) { 1516 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1517 } 1518 PetscFunctionReturn(0); 1519 } 1520 1521 #undef __FUNCT__ 1522 #define __FUNCT__ "DMGetRefineLevel" 1523 /*@ 1524 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1525 1526 Not Collective 1527 1528 Input Parameter: 1529 . dm - the DM object 1530 1531 Output Parameter: 1532 . level - number of refinements 1533 1534 Level: developer 1535 1536 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1537 1538 @*/ 1539 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1540 { 1541 PetscFunctionBegin; 1542 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1543 *level = dm->levelup; 1544 PetscFunctionReturn(0); 1545 } 1546 1547 #undef __FUNCT__ 1548 #define __FUNCT__ "DMGlobalToLocalBegin" 1549 /*@ 1550 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1551 1552 Neighbor-wise Collective on DM 1553 1554 Input Parameters: 1555 + dm - the DM object 1556 . g - the global vector 1557 . 
mode - INSERT_VALUES or ADD_VALUES 1558 - l - the local vector 1559 1560 1561 Level: beginner 1562 1563 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1564 1565 @*/ 1566 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1567 { 1568 PetscSF sf; 1569 PetscErrorCode ierr; 1570 1571 PetscFunctionBegin; 1572 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1573 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1574 if (sf) { 1575 PetscScalar *lArray, *gArray; 1576 1577 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1578 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1579 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1580 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1581 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1582 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1583 } else { 1584 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1585 } 1586 PetscFunctionReturn(0); 1587 } 1588 1589 #undef __FUNCT__ 1590 #define __FUNCT__ "DMGlobalToLocalEnd" 1591 /*@ 1592 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1593 1594 Neighbor-wise Collective on DM 1595 1596 Input Parameters: 1597 + dm - the DM object 1598 . g - the global vector 1599 . 
mode - INSERT_VALUES or ADD_VALUES 1600 - l - the local vector 1601 1602 1603 Level: beginner 1604 1605 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1606 1607 @*/ 1608 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1609 { 1610 PetscSF sf; 1611 PetscErrorCode ierr; 1612 PetscScalar *lArray, *gArray; 1613 1614 PetscFunctionBegin; 1615 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1616 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1617 if (sf) { 1618 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1619 1620 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1621 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1622 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1623 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1624 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1625 } else { 1626 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1627 } 1628 PetscFunctionReturn(0); 1629 } 1630 1631 #undef __FUNCT__ 1632 #define __FUNCT__ "DMLocalToGlobalBegin" 1633 /*@ 1634 DMLocalToGlobalBegin - updates global vectors from local vectors 1635 1636 Neighbor-wise Collective on DM 1637 1638 Input Parameters: 1639 + dm - the DM object 1640 . l - the local vector 1641 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1642 base point. 1643 - - the global vector 1644 1645 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1646 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1647 global array to the final global array with VecAXPY(). 1648 1649 Level: beginner 1650 1651 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1652 1653 @*/ 1654 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1655 { 1656 PetscSF sf; 1657 PetscErrorCode ierr; 1658 1659 PetscFunctionBegin; 1660 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1661 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1662 if (sf) { 1663 MPI_Op op; 1664 PetscScalar *lArray, *gArray; 1665 1666 switch(mode) { 1667 case INSERT_VALUES: 1668 case INSERT_ALL_VALUES: 1669 #if defined(PETSC_HAVE_MPI_REPLACE) 1670 op = MPI_REPLACE; break; 1671 #else 1672 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1673 #endif 1674 case ADD_VALUES: 1675 case ADD_ALL_VALUES: 1676 op = MPI_SUM; break; 1677 default: 1678 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1679 } 1680 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1681 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1682 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1683 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1684 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1685 } else { 1686 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1687 } 1688 PetscFunctionReturn(0); 1689 } 1690 1691 #undef __FUNCT__ 1692 #define __FUNCT__ "DMLocalToGlobalEnd" 1693 /*@ 1694 DMLocalToGlobalEnd - updates global vectors from local vectors 1695 1696 Neighbor-wise Collective on DM 1697 1698 Input Parameters: 1699 + dm - the DM object 1700 . l - the local vector 1701 . mode - INSERT_VALUES or ADD_VALUES 1702 - g - the global vector 1703 1704 1705 Level: beginner 1706 1707 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1708 1709 @*/ 1710 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1711 { 1712 PetscSF sf; 1713 PetscErrorCode ierr; 1714 1715 PetscFunctionBegin; 1716 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1717 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1718 if (sf) { 1719 MPI_Op op; 1720 PetscScalar *lArray, *gArray; 1721 1722 switch(mode) { 1723 case INSERT_VALUES: 1724 case INSERT_ALL_VALUES: 1725 #if defined(PETSC_HAVE_MPI_REPLACE) 1726 op = MPI_REPLACE; break; 1727 #else 1728 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1729 #endif 1730 case ADD_VALUES: 1731 case ADD_ALL_VALUES: 1732 op = MPI_SUM; break; 1733 default: 1734 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1735 } 1736 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1737 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1738 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1739 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1740 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1741 } else { 1742 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1743 } 1744 PetscFunctionReturn(0); 1745 } 1746 1747 #undef __FUNCT__ 1748 #define __FUNCT__ "DMComputeJacobianDefault" 1749 /*@ 1750 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1751 1752 Collective on DM 1753 1754 Input Parameter: 1755 + dm - the DM object 1756 . x - location to compute Jacobian at; may be ignored for linear problems 1757 . A - matrix that defines the operator for the linear solve 1758 - B - the matrix used to construct the preconditioner 1759 1760 Level: developer 1761 1762 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1763 DMSetFunction() 1764 1765 @*/ 1766 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1767 { 1768 PetscErrorCode ierr; 1769 1770 PetscFunctionBegin; 1771 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1772 *stflag = SAME_NONZERO_PATTERN; 1773 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1774 if (A != B) { 1775 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1776 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1777 } 1778 PetscFunctionReturn(0); 1779 } 1780 1781 #undef __FUNCT__ 1782 #define __FUNCT__ "DMCoarsen" 1783 /*@ 1784 DMCoarsen - Coarsens a DM object 1785 1786 Collective on DM 1787 1788 Input Parameter: 1789 + dm - the DM object 1790 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1791 1792 Output Parameter: 1793 . 
dmc - the coarsened DM 1794 1795 Level: developer 1796 1797 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1798 1799 @*/ 1800 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1801 { 1802 PetscErrorCode ierr; 1803 DMCoarsenHookLink link; 1804 1805 PetscFunctionBegin; 1806 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1807 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1808 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1809 (*dmc)->ops->initialguess = dm->ops->initialguess; 1810 (*dmc)->ops->function = dm->ops->function; 1811 (*dmc)->ops->functionj = dm->ops->functionj; 1812 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1813 (*dmc)->ops->jacobian = dm->ops->jacobian; 1814 } 1815 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1816 (*dmc)->ctx = dm->ctx; 1817 (*dmc)->levelup = dm->levelup; 1818 (*dmc)->leveldown = dm->leveldown + 1; 1819 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1820 for (link=dm->coarsenhook; link; link=link->next) { 1821 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1822 } 1823 PetscFunctionReturn(0); 1824 } 1825 1826 #undef __FUNCT__ 1827 #define __FUNCT__ "DMCoarsenHookAdd" 1828 /*@ 1829 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1830 1831 Logically Collective 1832 1833 Input Arguments: 1834 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1835 . coarsenhook - function to run when setting up a coarser level 1836 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1837 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1838 1839 Calling sequence of coarsenhook: 1840 $ coarsenhook(DM fine,DM coarse,void *ctx); 1841 1842 + fine - fine level DM 1843 . 
coarse - coarse level DM to restrict problem to 1844 - ctx - optional user-defined function context 1845 1846 Calling sequence for restricthook: 1847 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1848 1849 + fine - fine level DM 1850 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1851 . rscale - scaling vector for restriction 1852 . inject - matrix restricting by injection 1853 . coarse - coarse level DM to update 1854 - ctx - optional user-defined function context 1855 1856 Level: advanced 1857 1858 Notes: 1859 This function is only needed if auxiliary data needs to be set up on coarse grids. 1860 1861 If this function is called multiple times, the hooks will be run in the order they are added. 1862 1863 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1864 extract the finest level information from its context (instead of from the SNES). 1865 1866 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1867 @*/ 1868 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1869 { 1870 PetscErrorCode ierr; 1871 DMCoarsenHookLink link,*p; 1872 1873 PetscFunctionBegin; 1874 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1875 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1876 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1877 link->coarsenhook = coarsenhook; 1878 link->restricthook = restricthook; 1879 link->ctx = ctx; 1880 link->next = PETSC_NULL; 1881 *p = link; 1882 PetscFunctionReturn(0); 1883 } 1884 1885 #undef __FUNCT__ 1886 #define __FUNCT__ "DMRestrict" 1887 /*@ 1888 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1889 1890 Collective 
if any hooks are 1891 1892 Input Arguments: 1893 + fine - finer DM to use as a base 1894 . restrct - restriction matrix, apply using MatRestrict() 1895 . inject - injection matrix, also use MatRestrict() 1896 - coarse - coarer DM to update 1897 1898 Level: developer 1899 1900 .seealso: DMCoarsenHookAdd(), MatRestrict() 1901 @*/ 1902 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1903 { 1904 PetscErrorCode ierr; 1905 DMCoarsenHookLink link; 1906 1907 PetscFunctionBegin; 1908 for (link=fine->coarsenhook; link; link=link->next) { 1909 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1910 } 1911 PetscFunctionReturn(0); 1912 } 1913 1914 #undef __FUNCT__ 1915 #define __FUNCT__ "DMBlockRestrictHookAdd" 1916 /*@ 1917 DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1918 1919 Logically Collective 1920 1921 Input Arguments: 1922 + global - global DM 1923 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1924 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1925 1926 Calling sequence for restricthook: 1927 $ restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1928 1929 + global - global DM 1930 . out - scatter to the outer (with ghost and overlap points) block vector 1931 . in - scatter to block vector values only owned locally 1932 . block - block DM (may just be a shell if the global DM is passed in correctly) 1933 - ctx - optional user-defined function context 1934 1935 Level: advanced 1936 1937 Notes: 1938 This function is only needed if auxiliary data needs to be set up on coarse grids. 1939 1940 If this function is called multiple times, the hooks will be run in the order they are added. 
1941 1942 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1943 extract the finest level information from its context (instead of from the SNES). 1944 1945 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1946 @*/ 1947 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1948 { 1949 PetscErrorCode ierr; 1950 DMBlockRestrictHookLink link,*p; 1951 1952 PetscFunctionBegin; 1953 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1954 for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1955 ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr); 1956 link->restricthook = restricthook; 1957 link->ctx = ctx; 1958 link->next = PETSC_NULL; 1959 *p = link; 1960 PetscFunctionReturn(0); 1961 } 1962 1963 #undef __FUNCT__ 1964 #define __FUNCT__ "DMBlockRestrict" 1965 /*@ 1966 DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd() 1967 1968 Collective if any hooks are 1969 1970 Input Arguments: 1971 + fine - finer DM to use as a base 1972 . restrct - restriction matrix, apply using MatRestrict() 1973 . 
inject - injection matrix, also use MatRestrict() 1974 - coarse - coarer DM to update 1975 1976 Level: developer 1977 1978 .seealso: DMCoarsenHookAdd(), MatRestrict() 1979 @*/ 1980 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block) 1981 { 1982 PetscErrorCode ierr; 1983 DMBlockRestrictHookLink link; 1984 1985 PetscFunctionBegin; 1986 for (link=global->blockrestricthook; link; link=link->next) { 1987 if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);} 1988 } 1989 PetscFunctionReturn(0); 1990 } 1991 1992 #undef __FUNCT__ 1993 #define __FUNCT__ "DMGetCoarsenLevel" 1994 /*@ 1995 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1996 1997 Not Collective 1998 1999 Input Parameter: 2000 . dm - the DM object 2001 2002 Output Parameter: 2003 . level - number of coarsenings 2004 2005 Level: developer 2006 2007 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2008 2009 @*/ 2010 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2011 { 2012 PetscFunctionBegin; 2013 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2014 *level = dm->leveldown; 2015 PetscFunctionReturn(0); 2016 } 2017 2018 2019 2020 #undef __FUNCT__ 2021 #define __FUNCT__ "DMRefineHierarchy" 2022 /*@C 2023 DMRefineHierarchy - Refines a DM object, all levels at once 2024 2025 Collective on DM 2026 2027 Input Parameter: 2028 + dm - the DM object 2029 - nlevels - the number of levels of refinement 2030 2031 Output Parameter: 2032 . 
dmf - the refined DM hierarchy 2033 2034 Level: developer 2035 2036 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2037 2038 @*/ 2039 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2040 { 2041 PetscErrorCode ierr; 2042 2043 PetscFunctionBegin; 2044 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2045 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2046 if (nlevels == 0) PetscFunctionReturn(0); 2047 if (dm->ops->refinehierarchy) { 2048 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2049 } else if (dm->ops->refine) { 2050 PetscInt i; 2051 2052 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 2053 for (i=1; i<nlevels; i++) { 2054 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 2055 } 2056 } else { 2057 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2058 } 2059 PetscFunctionReturn(0); 2060 } 2061 2062 #undef __FUNCT__ 2063 #define __FUNCT__ "DMCoarsenHierarchy" 2064 /*@C 2065 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2066 2067 Collective on DM 2068 2069 Input Parameter: 2070 + dm - the DM object 2071 - nlevels - the number of levels of coarsening 2072 2073 Output Parameter: 2074 . 
dmc - the coarsened DM hierarchy 2075 2076 Level: developer 2077 2078 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2079 2080 @*/ 2081 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 2082 { 2083 PetscErrorCode ierr; 2084 2085 PetscFunctionBegin; 2086 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2087 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2088 if (nlevels == 0) PetscFunctionReturn(0); 2089 PetscValidPointer(dmc,3); 2090 if (dm->ops->coarsenhierarchy) { 2091 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 2092 } else if (dm->ops->coarsen) { 2093 PetscInt i; 2094 2095 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 2096 for (i=1; i<nlevels; i++) { 2097 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 2098 } 2099 } else { 2100 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 2101 } 2102 PetscFunctionReturn(0); 2103 } 2104 2105 #undef __FUNCT__ 2106 #define __FUNCT__ "DMCreateAggregates" 2107 /*@ 2108 DMCreateAggregates - Gets the aggregates that map between 2109 grids associated with two DMs. 2110 2111 Collective on DM 2112 2113 Input Parameters: 2114 + dmc - the coarse grid DM 2115 - dmf - the fine grid DM 2116 2117 Output Parameters: 2118 . 
rest - the restriction matrix (transpose of the projection matrix) 2119 2120 Level: intermediate 2121 2122 .keywords: interpolation, restriction, multigrid 2123 2124 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2125 @*/ 2126 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2127 { 2128 PetscErrorCode ierr; 2129 2130 PetscFunctionBegin; 2131 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2132 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2133 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2134 PetscFunctionReturn(0); 2135 } 2136 2137 #undef __FUNCT__ 2138 #define __FUNCT__ "DMSetApplicationContextDestroy" 2139 /*@C 2140 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2141 2142 Not Collective 2143 2144 Input Parameters: 2145 + dm - the DM object 2146 - destroy - the destroy function 2147 2148 Level: intermediate 2149 2150 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2151 2152 @*/ 2153 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2154 { 2155 PetscFunctionBegin; 2156 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2157 dm->ctxdestroy = destroy; 2158 PetscFunctionReturn(0); 2159 } 2160 2161 #undef __FUNCT__ 2162 #define __FUNCT__ "DMSetApplicationContext" 2163 /*@ 2164 DMSetApplicationContext - Set a user context into a DM object 2165 2166 Not Collective 2167 2168 Input Parameters: 2169 + dm - the DM object 2170 - ctx - the user context 2171 2172 Level: intermediate 2173 2174 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2175 2176 @*/ 2177 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2178 { 2179 PetscFunctionBegin; 2180 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2181 dm->ctx = ctx; 2182 PetscFunctionReturn(0); 2183 } 2184 
2185 #undef __FUNCT__ 2186 #define __FUNCT__ "DMGetApplicationContext" 2187 /*@ 2188 DMGetApplicationContext - Gets a user context from a DM object 2189 2190 Not Collective 2191 2192 Input Parameter: 2193 . dm - the DM object 2194 2195 Output Parameter: 2196 . ctx - the user context 2197 2198 Level: intermediate 2199 2200 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2201 2202 @*/ 2203 PetscErrorCode DMGetApplicationContext(DM dm,void *ctx) 2204 { 2205 PetscFunctionBegin; 2206 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2207 *(void**)ctx = dm->ctx; 2208 PetscFunctionReturn(0); 2209 } 2210 2211 #undef __FUNCT__ 2212 #define __FUNCT__ "DMSetInitialGuess" 2213 /*@C 2214 DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers 2215 2216 Logically Collective on DM 2217 2218 Input Parameter: 2219 + dm - the DM object to destroy 2220 - f - the function to compute the initial guess 2221 2222 Level: intermediate 2223 2224 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2225 2226 @*/ 2227 PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec)) 2228 { 2229 PetscFunctionBegin; 2230 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2231 dm->ops->initialguess = f; 2232 PetscFunctionReturn(0); 2233 } 2234 2235 #undef __FUNCT__ 2236 #define __FUNCT__ "DMSetFunction" 2237 /*@C 2238 DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES 2239 2240 Logically Collective on DM 2241 2242 Input Parameter: 2243 + dm - the DM object 2244 - f - the function to compute (use PETSC_NULL to cancel a previous function that was set) 2245 2246 Level: intermediate 2247 2248 Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite 
differences if no Jacobian 2249 computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian. 2250 2251 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2252 DMSetJacobian() 2253 2254 @*/ 2255 PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2256 { 2257 PetscFunctionBegin; 2258 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2259 dm->ops->function = f; 2260 if (f) { 2261 dm->ops->functionj = f; 2262 } 2263 PetscFunctionReturn(0); 2264 } 2265 2266 #undef __FUNCT__ 2267 #define __FUNCT__ "DMSetJacobian" 2268 /*@C 2269 DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES 2270 2271 Logically Collective on DM 2272 2273 Input Parameter: 2274 + dm - the DM object to destroy 2275 - f - the function to compute the matrix entries 2276 2277 Level: intermediate 2278 2279 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2280 DMSetFunction() 2281 2282 @*/ 2283 PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*)) 2284 { 2285 PetscFunctionBegin; 2286 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2287 dm->ops->jacobian = f; 2288 PetscFunctionReturn(0); 2289 } 2290 2291 #undef __FUNCT__ 2292 #define __FUNCT__ "DMSetVariableBounds" 2293 /*@C 2294 DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI. 
2295 2296 Logically Collective on DM 2297 2298 Input Parameter: 2299 + dm - the DM object 2300 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2301 2302 Level: intermediate 2303 2304 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2305 DMSetJacobian() 2306 2307 @*/ 2308 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2309 { 2310 PetscFunctionBegin; 2311 dm->ops->computevariablebounds = f; 2312 PetscFunctionReturn(0); 2313 } 2314 2315 #undef __FUNCT__ 2316 #define __FUNCT__ "DMHasVariableBounds" 2317 /*@ 2318 DMHasVariableBounds - does the DM object have a variable bounds function? 2319 2320 Not Collective 2321 2322 Input Parameter: 2323 . dm - the DM object to destroy 2324 2325 Output Parameter: 2326 . flg - PETSC_TRUE if the variable bounds function exists 2327 2328 Level: developer 2329 2330 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2331 2332 @*/ 2333 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2334 { 2335 PetscFunctionBegin; 2336 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2337 PetscFunctionReturn(0); 2338 } 2339 2340 #undef __FUNCT__ 2341 #define __FUNCT__ "DMComputeVariableBounds" 2342 /*@C 2343 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2344 2345 Logically Collective on DM 2346 2347 Input Parameters: 2348 + dm - the DM object to destroy 2349 - x - current solution at which the bounds are computed 2350 2351 Output parameters: 2352 + xl - lower bound 2353 - xu - upper bound 2354 2355 Level: intermediate 2356 2357 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2358 DMSetFunction(), DMSetVariableBounds() 2359 2360 @*/ 2361 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2362 { 2363 PetscErrorCode ierr; 2364 PetscFunctionBegin; 2365 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2366 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2367 if (dm->ops->computevariablebounds) { 2368 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2369 } 2370 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2371 PetscFunctionReturn(0); 2372 } 2373 2374 #undef __FUNCT__ 2375 #define __FUNCT__ "DMComputeInitialGuess" 2376 /*@ 2377 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2378 2379 Collective on DM 2380 2381 Input Parameter: 2382 + dm - the DM object to destroy 2383 - x - the vector to hold the initial guess values 2384 2385 Level: developer 2386 2387 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2388 2389 @*/ 2390 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2391 { 2392 PetscErrorCode ierr; 2393 PetscFunctionBegin; 2394 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2395 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2396 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2397 PetscFunctionReturn(0); 2398 } 2399 2400 #undef __FUNCT__ 2401 #define __FUNCT__ "DMHasInitialGuess" 2402 /*@ 2403 
DMHasInitialGuess - does the DM object have an initial guess function 2404 2405 Not Collective 2406 2407 Input Parameter: 2408 . dm - the DM object to destroy 2409 2410 Output Parameter: 2411 . flg - PETSC_TRUE if function exists 2412 2413 Level: developer 2414 2415 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2416 2417 @*/ 2418 PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg) 2419 { 2420 PetscFunctionBegin; 2421 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2422 *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE; 2423 PetscFunctionReturn(0); 2424 } 2425 2426 #undef __FUNCT__ 2427 #define __FUNCT__ "DMHasFunction" 2428 /*@ 2429 DMHasFunction - does the DM object have a function 2430 2431 Not Collective 2432 2433 Input Parameter: 2434 . dm - the DM object to destroy 2435 2436 Output Parameter: 2437 . flg - PETSC_TRUE if function exists 2438 2439 Level: developer 2440 2441 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2442 2443 @*/ 2444 PetscErrorCode DMHasFunction(DM dm,PetscBool *flg) 2445 { 2446 PetscFunctionBegin; 2447 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2448 *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE; 2449 PetscFunctionReturn(0); 2450 } 2451 2452 #undef __FUNCT__ 2453 #define __FUNCT__ "DMHasJacobian" 2454 /*@ 2455 DMHasJacobian - does the DM object have a matrix function 2456 2457 Not Collective 2458 2459 Input Parameter: 2460 . dm - the DM object to destroy 2461 2462 Output Parameter: 2463 . 
flg - PETSC_TRUE if function exists 2464 2465 Level: developer 2466 2467 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2468 2469 @*/ 2470 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2471 { 2472 PetscFunctionBegin; 2473 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2474 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2475 PetscFunctionReturn(0); 2476 } 2477 2478 #undef __FUNCT__ 2479 #define __FUNCT__ "DMHasColoring" 2480 /*@ 2481 DMHasColoring - does the DM object have a method of providing a coloring? 2482 2483 Not Collective 2484 2485 Input Parameter: 2486 . dm - the DM object 2487 2488 Output Parameter: 2489 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2490 2491 Level: developer 2492 2493 .seealso DMHasFunction(), DMCreateColoring() 2494 2495 @*/ 2496 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2497 { 2498 PetscFunctionBegin; 2499 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2500 PetscFunctionReturn(0); 2501 } 2502 2503 #undef __FUNCT__ 2504 #define __FUNCT__ "DMSetVec" 2505 /*@C 2506 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2507 2508 Collective on DM 2509 2510 Input Parameter: 2511 + dm - the DM object 2512 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2513 2514 Level: developer 2515 2516 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2517 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2518 2519 @*/ 2520 PetscErrorCode DMSetVec(DM dm,Vec x) 2521 { 2522 PetscErrorCode ierr; 2523 PetscFunctionBegin; 2524 if (x) { 2525 if (!dm->x) { 2526 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2527 } 2528 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2529 } 2530 else if (dm->x) { 2531 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2532 } 2533 PetscFunctionReturn(0); 2534 } 2535 2536 2537 #undef __FUNCT__ 2538 #define __FUNCT__ "DMComputeFunction" 2539 /*@ 2540 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2541 2542 Collective on DM 2543 2544 Input Parameter: 2545 + dm - the DM object to destroy 2546 . x - the location where the function is evaluationed, may be ignored for linear problems 2547 - b - the vector to hold the right hand side entries 2548 2549 Level: developer 2550 2551 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2552 DMSetJacobian() 2553 2554 @*/ 2555 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2556 { 2557 PetscErrorCode ierr; 2558 PetscFunctionBegin; 2559 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2560 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2561 PetscStackPush("DM user function"); 2562 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2563 PetscStackPop; 2564 PetscFunctionReturn(0); 2565 } 2566 2567 2568 2569 #undef __FUNCT__ 2570 #define __FUNCT__ "DMComputeJacobian" 2571 /*@ 2572 DMComputeJacobian - compute the matrix entries for the solver 2573 2574 Collective on DM 2575 2576 Input Parameter: 2577 + dm - the DM object 2578 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.  A - matrix that defines the operator for the linear solve
-  B - the matrix used to construct the preconditioner

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No analytic Jacobian was provided: lazily build a finite-difference,
       coloring-based Jacobian from the function installed by DMSetFunction()
       (stored in ops->functionj) and install it as the default jacobian
       operation for this and subsequent calls. */
    ISColoring    coloring;
    MatFDColoring fd;
    MatType       mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x; /* fall back to the vector stored by DMSetVec() or a previous call */
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    /* NOTE(review): presumably the user callback may assemble only B; ensure A is assembled too — confirm */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* Registry of available DM implementations; populated by DMRegister()/DMRegisterAll() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2650 2651 Level: intermediate 2652 2653 .keywords: DM, set, type 2654 .seealso: DMGetType(), DMCreate() 2655 @*/ 2656 PetscErrorCode DMSetType(DM dm, DMType method) 2657 { 2658 PetscErrorCode (*r)(DM); 2659 PetscBool match; 2660 PetscErrorCode ierr; 2661 2662 PetscFunctionBegin; 2663 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2664 ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr); 2665 if (match) PetscFunctionReturn(0); 2666 2667 if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);} 2668 ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr); 2669 if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method); 2670 2671 if (dm->ops->destroy) { 2672 ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr); 2673 dm->ops->destroy = PETSC_NULL; 2674 } 2675 ierr = (*r)(dm);CHKERRQ(ierr); 2676 ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr); 2677 PetscFunctionReturn(0); 2678 } 2679 2680 #undef __FUNCT__ 2681 #define __FUNCT__ "DMGetType" 2682 /*@C 2683 DMGetType - Gets the DM type name (as a string) from the DM. 2684 2685 Not Collective 2686 2687 Input Parameter: 2688 . dm - The DM 2689 2690 Output Parameter: 2691 . type - The DM type name 2692 2693 Level: intermediate 2694 2695 .keywords: DM, get, type, name 2696 .seealso: DMSetType(), DMCreate() 2697 @*/ 2698 PetscErrorCode DMGetType(DM dm, DMType *type) 2699 { 2700 PetscErrorCode ierr; 2701 2702 PetscFunctionBegin; 2703 PetscValidHeaderSpecific(dm, DM_CLASSID,1); 2704 PetscValidCharPointer(type,2); 2705 if (!DMRegisterAllCalled) { 2706 ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr); 2707 } 2708 *type = ((PetscObject)dm)->type_name; 2709 PetscFunctionReturn(0); 2710 } 2711 2712 #undef __FUNCT__ 2713 #define __FUNCT__ "DMConvert" 2714 /*@C 2715 DMConvert - Converts a DM to another DM, either of the same or different type. 

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below — confirm whether a
     fast path for same-type conversion was intended here */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Converter functions are composed onto the object under the name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    /* A temporary DM of the target type is created only to query its composed converter */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Level: advanced
@*/
PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char           fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Entries are stored as "path:name" so dynamic libraries can be located later */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  DMRegisterAllCalled = PETSC_FALSE; /* allow DMRegisterAll() to repopulate the list later */
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* Context stored as the DM application context by DMSetFunctionMatlab()/DMSetJacobianMatlab() */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
   DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 1,nrhs = 4;
  mxArray         *plhs[1],*prhs[4];
  long long int   lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* Smuggle the PETSc object pointers into MATLAB as doubles holding the raw addresses */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* The MATLAB routine returns a PETSc error code as its only result */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
   DMSetJacobianMatlab().
2931 2932 For linear problems x is null 2933 2934 .seealso: DMSetFunction(), DMGetFunction() 2935 */ 2936 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2937 { 2938 PetscErrorCode ierr; 2939 DMMatlabContext *sctx; 2940 int nlhs = 2,nrhs = 5; 2941 mxArray *plhs[2],*prhs[5]; 2942 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2943 2944 PetscFunctionBegin; 2945 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2946 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2947 2948 /* call MATLAB function in ctx with arguments x, A, and B */ 2949 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2950 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2951 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2952 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2953 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2954 prhs[0] = mxCreateDoubleScalar((double)ls); 2955 prhs[1] = mxCreateDoubleScalar((double)lx); 2956 prhs[2] = mxCreateDoubleScalar((double)lA); 2957 prhs[3] = mxCreateDoubleScalar((double)lB); 2958 prhs[4] = mxCreateString(sctx->jacname); 2959 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2960 *str = (MatStructure) mxGetScalar(plhs[0]); 2961 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2962 mxDestroyArray(prhs[0]); 2963 mxDestroyArray(prhs[1]); 2964 mxDestroyArray(prhs[2]); 2965 mxDestroyArray(prhs[3]); 2966 mxDestroyArray(prhs[4]); 2967 mxDestroyArray(plhs[0]); 2968 mxDestroyArray(plhs[1]); 2969 PetscFunctionReturn(0); 2970 } 2971 2972 2973 #undef __FUNCT__ 2974 #define __FUNCT__ "DMSetJacobianMatlab" 2975 /* 2976 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2977 2978 */ 2979 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2980 { 2981 PetscErrorCode ierr; 2982 DMMatlabContext *sctx; 2983 2984 PetscFunctionBegin; 2985 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2986 /* currently sctx is memory bleed */ 2987 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2988 if (!sctx) { 2989 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2990 } 2991 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2992 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2993 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2994 PetscFunctionReturn(0); 2995 } 2996 #endif 2997 2998 #undef __FUNCT__ 2999 #define __FUNCT__ "DMLoad" 3000 /*@C 3001 DMLoad - Loads a DM that has been stored in binary with DMView(). 3002 3003 Collective on PetscViewer 3004 3005 Input Parameters: 3006 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 3007 some related function before a call to DMLoad(). 3008 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 3009 HDF5 file viewer, obtained from PetscViewerHDF5Open() 3010 3011 Level: intermediate 3012 3013 Notes: 3014 The type is determined by the data in the file, any type set into the DM before this call is ignored. 3015 3016 Notes for advanced users: 3017 Most users should not need to know the details of the binary storage 3018 format, since DMLoad() and DMView() completely hide these details. 
3019 But for anyone who's interested, the standard binary matrix storage 3020 format is 3021 .vb 3022 has not yet been determined 3023 .ve 3024 3025 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 3026 @*/ 3027 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 3028 { 3029 PetscErrorCode ierr; 3030 PetscBool isbinary; 3031 PetscInt classid; 3032 char type[256]; 3033 3034 PetscFunctionBegin; 3035 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 3036 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 3037 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 3038 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 3039 3040 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 3041 if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file"); 3042 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 3043 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 3044 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 3045 PetscFunctionReturn(0); 3046 } 3047 3048 /******************************** FEM Support **********************************/ 3049 3050 #undef __FUNCT__ 3051 #define __FUNCT__ "DMPrintCellVector" 3052 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 3053 PetscInt f; 3054 PetscErrorCode ierr; 3055 3056 PetscFunctionBegin; 3057 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3058 for (f = 0; f < len; ++f) { 3059 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 3060 } 3061 PetscFunctionReturn(0); 3062 } 3063 3064 #undef __FUNCT__ 3065 #define __FUNCT__ "DMPrintCellMatrix" 3066 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 3067 PetscInt f, g; 3068 
PetscErrorCode ierr; 3069 3070 PetscFunctionBegin; 3071 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3072 for (f = 0; f < rows; ++f) { 3073 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 3074 for (g = 0; g < cols; ++g) { 3075 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 3076 } 3077 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 3078 } 3079 PetscFunctionReturn(0); 3080 } 3081 3082 #undef __FUNCT__ 3083 #define __FUNCT__ "DMGetLocalFunction" 3084 /*@C 3085 DMGetLocalFunction - Get the local residual function from this DM 3086 3087 Not collective 3088 3089 Input Parameter: 3090 . dm - The DM 3091 3092 Output Parameter: 3093 . lf - The local residual function 3094 3095 Calling sequence of lf: 3096 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3097 3098 + snes - the SNES context 3099 . x - local vector with the state at which to evaluate residual 3100 . f - local vector to put residual in 3101 - ctx - optional user-defined function context 3102 3103 Level: intermediate 3104 3105 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3106 @*/ 3107 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 3108 { 3109 PetscFunctionBegin; 3110 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3111 if (lf) *lf = dm->lf; 3112 PetscFunctionReturn(0); 3113 } 3114 3115 #undef __FUNCT__ 3116 #define __FUNCT__ "DMSetLocalFunction" 3117 /*@C 3118 DMSetLocalFunction - Set the local residual function from this DM 3119 3120 Not collective 3121 3122 Input Parameters: 3123 + dm - The DM 3124 - lf - The local residual function 3125 3126 Calling sequence of lf: 3127 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3128 3129 + snes - the SNES context 3130 . x - local vector with the state at which to evaluate residual 3131 . 
f - local vector to put residual in 3132 - ctx - optional user-defined function context 3133 3134 Level: intermediate 3135 3136 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3137 @*/ 3138 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *)) 3139 { 3140 PetscFunctionBegin; 3141 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3142 dm->lf = lf; 3143 PetscFunctionReturn(0); 3144 } 3145 3146 #undef __FUNCT__ 3147 #define __FUNCT__ "DMGetLocalJacobian" 3148 /*@C 3149 DMGetLocalJacobian - Get the local Jacobian function from this DM 3150 3151 Not collective 3152 3153 Input Parameter: 3154 . dm - The DM 3155 3156 Output Parameter: 3157 . lj - The local Jacobian function 3158 3159 Calling sequence of lj: 3160 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3161 3162 + snes - the SNES context 3163 . x - local vector with the state at which to evaluate residual 3164 . J - matrix to put Jacobian in 3165 . M - matrix to use for defining Jacobian preconditioner 3166 - ctx - optional user-defined function context 3167 3168 Level: intermediate 3169 3170 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3171 @*/ 3172 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *)) 3173 { 3174 PetscFunctionBegin; 3175 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3176 if (lj) *lj = dm->lj; 3177 PetscFunctionReturn(0); 3178 } 3179 3180 #undef __FUNCT__ 3181 #define __FUNCT__ "DMSetLocalJacobian" 3182 /*@C 3183 DMSetLocalJacobian - Set the local Jacobian function from this DM 3184 3185 Not collective 3186 3187 Input Parameters: 3188 + dm - The DM 3189 - lj - The local Jacobian function 3190 3191 Calling sequence of lj: 3192 $ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx); 3193 3194 + snes - the SNES context 3195 . x - local vector with the state at which to evaluate residual 3196 . J - matrix to put Jacobian in 3197 . 
M - matrix to use for defining Jacobian preconditioner 3198 - ctx - optional user-defined function context 3199 3200 Level: intermediate 3201 3202 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction() 3203 @*/ 3204 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *)) 3205 { 3206 PetscFunctionBegin; 3207 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3208 dm->lj = lj; 3209 PetscFunctionReturn(0); 3210 } 3211 3212 #undef __FUNCT__ 3213 #define __FUNCT__ "DMGetDefaultSection" 3214 /*@ 3215 DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM. 3216 3217 Input Parameter: 3218 . dm - The DM 3219 3220 Output Parameter: 3221 . section - The PetscSection 3222 3223 Level: intermediate 3224 3225 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 3226 3227 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3228 @*/ 3229 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) { 3230 PetscFunctionBegin; 3231 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3232 PetscValidPointer(section, 2); 3233 *section = dm->defaultSection; 3234 PetscFunctionReturn(0); 3235 } 3236 3237 #undef __FUNCT__ 3238 #define __FUNCT__ "DMSetDefaultSection" 3239 /*@ 3240 DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM. 
3241 3242 Input Parameters: 3243 + dm - The DM 3244 - section - The PetscSection 3245 3246 Level: intermediate 3247 3248 Note: Any existing Section will be destroyed 3249 3250 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3251 @*/ 3252 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3253 PetscInt numFields; 3254 PetscInt f; 3255 PetscErrorCode ierr; 3256 3257 PetscFunctionBegin; 3258 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3259 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3260 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3261 dm->defaultSection = section; 3262 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3263 if (numFields) { 3264 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3265 for (f = 0; f < numFields; ++f) { 3266 const char *name; 3267 3268 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3269 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3270 } 3271 } 3272 PetscFunctionReturn(0); 3273 } 3274 3275 #undef __FUNCT__ 3276 #define __FUNCT__ "DMGetDefaultGlobalSection" 3277 /*@ 3278 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3279 3280 Collective on DM 3281 3282 Input Parameter: 3283 . dm - The DM 3284 3285 Output Parameter: 3286 . section - The PetscSection 3287 3288 Level: intermediate 3289 3290 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 
3291 3292 .seealso: DMSetDefaultSection(), DMGetDefaultSection() 3293 @*/ 3294 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) { 3295 PetscErrorCode ierr; 3296 3297 PetscFunctionBegin; 3298 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3299 PetscValidPointer(section, 2); 3300 if (!dm->defaultGlobalSection) { 3301 if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection"); 3302 ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr); 3303 ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr); 3304 } 3305 *section = dm->defaultGlobalSection; 3306 PetscFunctionReturn(0); 3307 } 3308 3309 #undef __FUNCT__ 3310 #define __FUNCT__ "DMSetDefaultGlobalSection" 3311 /*@ 3312 DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM. 3313 3314 Input Parameters: 3315 + dm - The DM 3316 - section - The PetscSection 3317 3318 Level: intermediate 3319 3320 Note: Any existing Section will be destroyed 3321 3322 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection() 3323 @*/ 3324 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) { 3325 PetscErrorCode ierr; 3326 3327 PetscFunctionBegin; 3328 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3329 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3330 dm->defaultGlobalSection = section; 3331 PetscFunctionReturn(0); 3332 } 3333 3334 #undef __FUNCT__ 3335 #define __FUNCT__ "DMGetDefaultSF" 3336 /*@ 3337 DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set, 3338 it is created from the default PetscSection layouts in the DM. 3339 3340 Input Parameter: 3341 . dm - The DM 3342 3343 Output Parameter: 3344 . 
sf - The PetscSF 3345 3346 Level: intermediate 3347 3348 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3349 3350 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3351 @*/ 3352 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3353 PetscInt nroots; 3354 PetscErrorCode ierr; 3355 3356 PetscFunctionBegin; 3357 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3358 PetscValidPointer(sf, 2); 3359 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3360 if (nroots < 0) { 3361 PetscSection section, gSection; 3362 3363 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3364 if (section) { 3365 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3366 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3367 } else { 3368 *sf = PETSC_NULL; 3369 PetscFunctionReturn(0); 3370 } 3371 } 3372 *sf = dm->defaultSF; 3373 PetscFunctionReturn(0); 3374 } 3375 3376 #undef __FUNCT__ 3377 #define __FUNCT__ "DMSetDefaultSF" 3378 /*@ 3379 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3380 3381 Input Parameters: 3382 + dm - The DM 3383 - sf - The PetscSF 3384 3385 Level: intermediate 3386 3387 Note: Any previous SF is destroyed 3388 3389 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3390 @*/ 3391 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3392 PetscErrorCode ierr; 3393 3394 PetscFunctionBegin; 3395 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3396 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3397 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3398 dm->defaultSF = sf; 3399 PetscFunctionReturn(0); 3400 } 3401 3402 #undef __FUNCT__ 3403 #define __FUNCT__ "DMCreateDefaultSF" 3404 /*@C 3405 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3406 describing the data layout. 3407 3408 Input Parameters: 3409 + dm - The DM 3410 . 
localSection - PetscSection describing the local data layout 3411 - globalSection - PetscSection describing the global data layout 3412 3413 Level: intermediate 3414 3415 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3416 @*/ 3417 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3418 { 3419 MPI_Comm comm = ((PetscObject) dm)->comm; 3420 PetscLayout layout; 3421 const PetscInt *ranges; 3422 PetscInt *local; 3423 PetscSFNode *remote; 3424 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3425 PetscMPIInt size, rank; 3426 PetscErrorCode ierr; 3427 3428 PetscFunctionBegin; 3429 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3430 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3431 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3432 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3433 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3434 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3435 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3436 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3437 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3438 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3439 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3440 PetscInt gdof, gcdof; 3441 3442 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3443 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3444 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3445 } 3446 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3447 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3448 for (p = pStart, l = 0; p < pEnd; ++p) { 3449 const PetscInt *cind; 3450 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3451 3452 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3453 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3454 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3455 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3456 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3457 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3458 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3459 if (!gdof) continue; /* Censored point */ 3460 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3461 if (gsize != dof-cdof) { 3462 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3463 cdof = 0; /* Ignore constraints */ 3464 } 3465 for (d = 0, c = 0; d < dof; ++d) { 3466 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3467 local[l+d-c] = off+d; 3468 } 3469 if (gdof < 0) { 3470 for(d = 0; d < gsize; ++d, ++l) { 3471 PetscInt offset = -(goff+1) + d, r; 3472 3473 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 3474 if (r < 0) r = -(r+2); 3475 remote[l].rank = r; 3476 remote[l].index = offset - ranges[r]; 3477 } 3478 } else { 3479 for(d = 0; d < gsize; ++d, ++l) { 3480 remote[l].rank = rank; 3481 remote[l].index = goff+d - ranges[rank]; 3482 } 3483 } 3484 } 3485 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3486 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3487 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3488 PetscFunctionReturn(0); 3489 } 3490 3491 #undef __FUNCT__ 3492 #define __FUNCT__ "DMGetPointSF" 3493 /*@ 3494 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3495 3496 Input Parameter: 3497 . dm - The DM 3498 3499 Output Parameter: 3500 . sf - The PetscSF 3501 3502 Level: intermediate 3503 3504 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3505 3506 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3507 @*/ 3508 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3509 PetscFunctionBegin; 3510 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3511 PetscValidPointer(sf, 2); 3512 *sf = dm->sf; 3513 PetscFunctionReturn(0); 3514 } 3515 3516 #undef __FUNCT__ 3517 #define __FUNCT__ "DMSetPointSF" 3518 /*@ 3519 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3520 3521 Input Parameters: 3522 + dm - The DM 3523 - sf - The PetscSF 3524 3525 Level: intermediate 3526 3527 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3528 @*/ 3529 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) { 3530 PetscErrorCode ierr; 3531 3532 PetscFunctionBegin; 3533 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3534 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3535 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3536 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3537 dm->sf = sf; 3538 PetscFunctionReturn(0); 3539 } 3540 3541 #undef __FUNCT__ 3542 #define __FUNCT__ "DMGetNumFields" 3543 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3544 { 3545 PetscFunctionBegin; 3546 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3547 PetscValidPointer(numFields, 2); 3548 *numFields = dm->numFields; 3549 PetscFunctionReturn(0); 3550 } 3551 3552 #undef __FUNCT__ 3553 #define __FUNCT__ "DMSetNumFields" 3554 PetscErrorCode DMSetNumFields(DM dm, PetscInt 
numFields) 3555 { 3556 PetscInt f; 3557 PetscErrorCode ierr; 3558 3559 PetscFunctionBegin; 3560 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3561 for (f = 0; f < dm->numFields; ++f) { 3562 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3563 } 3564 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3565 dm->numFields = numFields; 3566 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3567 for (f = 0; f < dm->numFields; ++f) { 3568 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3569 } 3570 PetscFunctionReturn(0); 3571 } 3572 3573 #undef __FUNCT__ 3574 #define __FUNCT__ "DMGetField" 3575 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3576 { 3577 PetscFunctionBegin; 3578 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3579 PetscValidPointer(field, 2); 3580 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3581 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3582 *field = dm->fields[f]; 3583 PetscFunctionReturn(0); 3584 } 3585 3586 #undef __FUNCT__ 3587 #define __FUNCT__ "DMSetCoordinates" 3588 /*@ 3589 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3590 3591 Collective on DM 3592 3593 Input Parameters: 3594 + dm - the DM 3595 - c - coordinate vector 3596 3597 Note: 3598 The coordinates do include those for ghost points, which are in the local vector 3599 3600 Level: intermediate 3601 3602 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3603 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3604 @*/ 3605 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3606 { 3607 PetscErrorCode ierr; 3608 3609 PetscFunctionBegin; 3610 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 
3611 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3612 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3613 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3614 dm->coordinates = c; 3615 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3616 PetscFunctionReturn(0); 3617 } 3618 3619 #undef __FUNCT__ 3620 #define __FUNCT__ "DMSetCoordinatesLocal" 3621 /*@ 3622 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3623 3624 Collective on DM 3625 3626 Input Parameters: 3627 + dm - the DM 3628 - c - coordinate vector 3629 3630 Note: 3631 The coordinates of ghost points can be set using DMSetCoordinates() 3632 followed by DMGetCoordinatesLocal(). This is intended to enable the 3633 setting of ghost coordinates outside of the domain. 3634 3635 Level: intermediate 3636 3637 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3638 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3639 @*/ 3640 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3641 { 3642 PetscErrorCode ierr; 3643 3644 PetscFunctionBegin; 3645 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3646 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3647 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3648 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3649 dm->coordinatesLocal = c; 3650 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3651 PetscFunctionReturn(0); 3652 } 3653 3654 #undef __FUNCT__ 3655 #define __FUNCT__ "DMGetCoordinates" 3656 /*@ 3657 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3658 3659 Not Collective 3660 3661 Input Parameter: 3662 . dm - the DM 3663 3664 Output Parameter: 3665 . c - global coordinate vector 3666 3667 Note: 3668 This is a borrowed reference, so the user should NOT destroy this vector 3669 3670 Each process has only the local coordinates (does NOT have the ghost coordinates). 
3671 3672 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3673 and (x_0,y_0,z_0,x_1,y_1,z_1...) 3674 3675 Level: intermediate 3676 3677 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3678 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM() 3679 @*/ 3680 PetscErrorCode DMGetCoordinates(DM dm, Vec *c) 3681 { 3682 PetscErrorCode ierr; 3683 3684 PetscFunctionBegin; 3685 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3686 PetscValidPointer(c,2); 3687 if (!dm->coordinates && dm->coordinatesLocal) { 3688 DM cdm; 3689 3690 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3691 ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr); 3692 ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr); 3693 ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3694 ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr); 3695 } 3696 *c = dm->coordinates; 3697 PetscFunctionReturn(0); 3698 } 3699 3700 #undef __FUNCT__ 3701 #define __FUNCT__ "DMGetCoordinatesLocal" 3702 /*@ 3703 DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM. 3704 3705 Collective on DM 3706 3707 Input Parameter: 3708 . dm - the DM 3709 3710 Output Parameter: 3711 . c - coordinate vector 3712 3713 Note: 3714 This is a borrowed reference, so the user should NOT destroy this vector 3715 3716 Each process has the local and ghost coordinates 3717 3718 For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...) 3719 and (x_0,y_0,z_0,x_1,y_1,z_1...) 
3720 3721 Level: intermediate 3722 3723 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3724 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM() 3725 @*/ 3726 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c) 3727 { 3728 PetscErrorCode ierr; 3729 3730 PetscFunctionBegin; 3731 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3732 PetscValidPointer(c,2); 3733 if (!dm->coordinatesLocal && dm->coordinates) { 3734 DM cdm; 3735 3736 ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr); 3737 ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr); 3738 ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr); 3739 ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3740 ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr); 3741 } 3742 *c = dm->coordinatesLocal; 3743 PetscFunctionReturn(0); 3744 } 3745 3746 #undef __FUNCT__ 3747 #define __FUNCT__ "DMGetCoordinateDM" 3748 /*@ 3749 DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates 3750 3751 Collective on DM 3752 3753 Input Parameter: 3754 . dm - the DM 3755 3756 Output Parameter: 3757 . 
cdm - coordinate DM 3758 3759 Level: intermediate 3760 3761 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3762 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3763 @*/ 3764 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3765 { 3766 PetscErrorCode ierr; 3767 3768 PetscFunctionBegin; 3769 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3770 PetscValidPointer(cdm,2); 3771 if (!dm->coordinateDM) { 3772 if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3773 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3774 } 3775 *cdm = dm->coordinateDM; 3776 PetscFunctionReturn(0); 3777 } 3778