1 #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/ 2 3 PetscClassId DM_CLASSID; 4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal; 5 6 #undef __FUNCT__ 7 #define __FUNCT__ "DMCreate" 8 /*@ 9 DMCreate - Creates an empty DM object. The type can then be set with DMSetType(). 10 11 If you never call DMSetType() it will generate an 12 error when you try to use the vector. 13 14 Collective on MPI_Comm 15 16 Input Parameter: 17 . comm - The communicator for the DM object 18 19 Output Parameter: 20 . dm - The DM object 21 22 Level: beginner 23 24 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE 25 @*/ 26 PetscErrorCode DMCreate(MPI_Comm comm,DM *dm) 27 { 28 DM v; 29 PetscErrorCode ierr; 30 31 PetscFunctionBegin; 32 PetscValidPointer(dm,2); 33 *dm = PETSC_NULL; 34 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 35 ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr); 36 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 37 ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr); 38 #endif 39 40 ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr); 41 ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr); 42 43 44 v->ltogmap = PETSC_NULL; 45 v->ltogmapb = PETSC_NULL; 46 v->bs = 1; 47 v->coloringtype = IS_COLORING_GLOBAL; 48 v->lf = PETSC_NULL; 49 v->lj = PETSC_NULL; 50 ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr); 51 ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr); 52 v->defaultSection = PETSC_NULL; 53 v->defaultGlobalSection = PETSC_NULL; 54 { 55 PetscInt i; 56 for (i = 0; i < 10; ++i) { 57 v->nullspaceConstructors[i] = PETSC_NULL; 58 } 59 } 60 v->numFields = 0; 61 v->fields = PETSC_NULL; 62 63 *dm = v; 64 PetscFunctionReturn(0); 65 } 66 67 #undef __FUNCT__ 68 #define __FUNCT__ "DMSetVecType" 69 /*@C 70 DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector() 71 72 Logically Collective on DMDA 73 74 Input 
Parameter: 75 + da - initial distributed array 76 . ctype - the vector type, currently either VECSTANDARD or VECCUSP 77 78 Options Database: 79 . -dm_vec_type ctype 80 81 Level: intermediate 82 83 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType 84 @*/ 85 PetscErrorCode DMSetVecType(DM da,VecType ctype) 86 { 87 PetscErrorCode ierr; 88 89 PetscFunctionBegin; 90 PetscValidHeaderSpecific(da,DM_CLASSID,1); 91 ierr = PetscFree(da->vectype);CHKERRQ(ierr); 92 ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); 93 PetscFunctionReturn(0); 94 } 95 96 #undef __FUNCT__ 97 #define __FUNCT__ "VecGetDM" 98 /*@ 99 VecGetDM - Gets the DM defining the data layout of the vector 100 101 Not collective 102 103 Input Parameter: 104 . v - The Vec 105 106 Output Parameter: 107 . dm - The DM 108 109 Level: intermediate 110 111 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 112 @*/ 113 PetscErrorCode VecGetDM(Vec v, DM *dm) 114 { 115 PetscErrorCode ierr; 116 117 PetscFunctionBegin; 118 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 119 PetscValidPointer(dm,2); 120 ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 121 PetscFunctionReturn(0); 122 } 123 124 #undef __FUNCT__ 125 #define __FUNCT__ "VecSetDM" 126 /*@ 127 VecSetDM - Sets the DM defining the data layout of the vector 128 129 Not collective 130 131 Input Parameters: 132 + v - The Vec 133 - dm - The DM 134 135 Level: intermediate 136 137 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType() 138 @*/ 139 PetscErrorCode VecSetDM(Vec v, DM dm) 140 { 141 PetscErrorCode ierr; 142 143 PetscFunctionBegin; 144 PetscValidHeaderSpecific(v,VEC_CLASSID,1); 145 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 146 ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 147 PetscFunctionReturn(0); 148 } 149 150 #undef __FUNCT__ 151 #define __FUNCT__ 
"DMSetMatType" 152 /*@C 153 DMSetMatType - Sets the type of matrix created with DMCreateMatrix() 154 155 Logically Collective on DM 156 157 Input Parameter: 158 + dm - the DM context 159 . ctype - the matrix type 160 161 Options Database: 162 . -dm_mat_type ctype 163 164 Level: intermediate 165 166 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType 167 @*/ 168 PetscErrorCode DMSetMatType(DM dm,MatType ctype) 169 { 170 PetscErrorCode ierr; 171 PetscFunctionBegin; 172 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 173 ierr = PetscFree(dm->mattype);CHKERRQ(ierr); 174 ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); 175 PetscFunctionReturn(0); 176 } 177 178 #undef __FUNCT__ 179 #define __FUNCT__ "MatGetDM" 180 /*@ 181 MatGetDM - Gets the DM defining the data layout of the matrix 182 183 Not collective 184 185 Input Parameter: 186 . A - The Mat 187 188 Output Parameter: 189 . dm - The DM 190 191 Level: intermediate 192 193 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType() 194 @*/ 195 PetscErrorCode MatGetDM(Mat A, DM *dm) 196 { 197 PetscErrorCode ierr; 198 199 PetscFunctionBegin; 200 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 201 PetscValidPointer(dm,2); 202 ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr); 203 PetscFunctionReturn(0); 204 } 205 206 #undef __FUNCT__ 207 #define __FUNCT__ "MatSetDM" 208 /*@ 209 MatSetDM - Sets the DM defining the data layout of the matrix 210 211 Not collective 212 213 Input Parameters: 214 + A - The Mat 215 - dm - The DM 216 217 Level: intermediate 218 219 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType() 220 @*/ 221 PetscErrorCode MatSetDM(Mat A, DM dm) 222 { 223 PetscErrorCode ierr; 224 225 PetscFunctionBegin; 226 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 227 if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);} 228 ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr); 229 
PetscFunctionReturn(0); 230 } 231 232 #undef __FUNCT__ 233 #define __FUNCT__ "DMSetOptionsPrefix" 234 /*@C 235 DMSetOptionsPrefix - Sets the prefix used for searching for all 236 DMDA options in the database. 237 238 Logically Collective on DMDA 239 240 Input Parameter: 241 + da - the DMDA context 242 - prefix - the prefix to prepend to all option names 243 244 Notes: 245 A hyphen (-) must NOT be given at the beginning of the prefix name. 246 The first character of all runtime options is AUTOMATICALLY the hyphen. 247 248 Level: advanced 249 250 .keywords: DMDA, set, options, prefix, database 251 252 .seealso: DMSetFromOptions() 253 @*/ 254 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 255 { 256 PetscErrorCode ierr; 257 258 PetscFunctionBegin; 259 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 260 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 261 PetscFunctionReturn(0); 262 } 263 264 #undef __FUNCT__ 265 #define __FUNCT__ "DMDestroy" 266 /*@ 267 DMDestroy - Destroys a vector packer or DMDA. 268 269 Collective on DM 270 271 Input Parameter: 272 . 
dm - the DM object to destroy 273 274 Level: developer 275 276 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 277 278 @*/ 279 PetscErrorCode DMDestroy(DM *dm) 280 { 281 PetscInt i, cnt = 0, f; 282 DMNamedVecLink nlink,nnext; 283 PetscErrorCode ierr; 284 285 PetscFunctionBegin; 286 if (!*dm) PetscFunctionReturn(0); 287 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 288 289 /* count all the circular references of DM and its contained Vecs */ 290 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 291 if ((*dm)->localin[i]) {cnt++;} 292 if ((*dm)->globalin[i]) {cnt++;} 293 } 294 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 295 if ((*dm)->x) { 296 DM obj; 297 ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr); 298 if (obj == *dm) cnt++; 299 } 300 301 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 302 /* 303 Need this test because the dm references the vectors that 304 reference the dm, so destroying the dm calls destroy on the 305 vectors that cause another destroy on the dm 306 */ 307 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 308 ((PetscObject) (*dm))->refct = 0; 309 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 310 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()"); 311 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 312 } 313 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 314 nnext = nlink->next; 315 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 316 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 317 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 318 ierr = PetscFree(nlink);CHKERRQ(ierr); 319 } 320 (*dm)->namedglobal = PETSC_NULL; 321 322 /* Destroy the list of hooks */ 323 { 324 DMCoarsenHookLink link,next; 325 for 
(link=(*dm)->coarsenhook; link; link=next) { 326 next = link->next; 327 ierr = PetscFree(link);CHKERRQ(ierr); 328 } 329 (*dm)->coarsenhook = PETSC_NULL; 330 } 331 { 332 DMRefineHookLink link,next; 333 for (link=(*dm)->refinehook; link; link=next) { 334 next = link->next; 335 ierr = PetscFree(link);CHKERRQ(ierr); 336 } 337 (*dm)->refinehook = PETSC_NULL; 338 } 339 /* Destroy the work arrays */ 340 { 341 DMWorkLink link,next; 342 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 343 for (link=(*dm)->workin; link; link=next) { 344 next = link->next; 345 ierr = PetscFree(link->mem);CHKERRQ(ierr); 346 ierr = PetscFree(link);CHKERRQ(ierr); 347 } 348 (*dm)->workin = PETSC_NULL; 349 } 350 351 if ((*dm)->ctx && (*dm)->ctxdestroy) { 352 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 353 } 354 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 355 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 356 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 357 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 358 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 359 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 360 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 361 362 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 363 ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 364 ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr); 365 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 366 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 367 368 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 369 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 370 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 371 372 for (f = 0; f < (*dm)->numFields; ++f) { 373 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 374 } 375 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 376 /* if memory was published with AMS then destroy it */ 377 
ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 378 379 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 380 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 381 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 382 PetscFunctionReturn(0); 383 } 384 385 #undef __FUNCT__ 386 #define __FUNCT__ "DMSetUp" 387 /*@ 388 DMSetUp - sets up the data structures inside a DM object 389 390 Collective on DM 391 392 Input Parameter: 393 . dm - the DM object to setup 394 395 Level: developer 396 397 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 398 399 @*/ 400 PetscErrorCode DMSetUp(DM dm) 401 { 402 PetscErrorCode ierr; 403 404 PetscFunctionBegin; 405 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 406 if (dm->setupcalled) PetscFunctionReturn(0); 407 if (dm->ops->setup) { 408 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 409 } 410 dm->setupcalled = PETSC_TRUE; 411 PetscFunctionReturn(0); 412 } 413 414 #undef __FUNCT__ 415 #define __FUNCT__ "DMSetFromOptions" 416 /*@ 417 DMSetFromOptions - sets parameters in a DM from the options database 418 419 Collective on DM 420 421 Input Parameter: 422 . dm - the DM object to set options for 423 424 Options Database: 425 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 426 . -dm_vec_type <type> type of vector to create inside DM 427 . 
-dm_mat_type <type> type of matrix to create inside DM 428 - -dm_coloring_type <global or ghosted> 429 430 Level: developer 431 432 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 433 434 @*/ 435 PetscErrorCode DMSetFromOptions(DM dm) 436 { 437 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 438 PetscErrorCode ierr; 439 char typeName[256] = MATAIJ; 440 441 PetscFunctionBegin; 442 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 443 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 444 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 445 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 446 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 447 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 448 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 449 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 450 if (flg) { 451 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 452 } 453 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 454 if (flg) { 455 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 456 } 457 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 458 if 
(dm->ops->setfromoptions) { 459 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 460 } 461 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 462 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 463 ierr = PetscOptionsEnd();CHKERRQ(ierr); 464 if (flg1) { 465 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 466 } 467 if (flg2) { 468 PetscViewer viewer; 469 470 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 471 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 472 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 473 ierr = DMView(dm, viewer);CHKERRQ(ierr); 474 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 475 } 476 if (flg3) { 477 PetscViewer viewer; 478 479 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 480 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 481 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 482 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 483 ierr = DMView(dm, viewer);CHKERRQ(ierr); 484 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 485 } 486 if (flg4) { 487 PetscViewer viewer; 488 489 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 490 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 491 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 492 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 493 ierr = DMView(dm, viewer);CHKERRQ(ierr); 494 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 495 } 496 PetscFunctionReturn(0); 497 } 498 499 #undef __FUNCT__ 500 #define __FUNCT__ "DMView" 501 /*@C 502 DMView - Views a vector packer or DMDA. 
503 504 Collective on DM 505 506 Input Parameter: 507 + dm - the DM object to view 508 - v - the viewer 509 510 Level: developer 511 512 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 513 514 @*/ 515 PetscErrorCode DMView(DM dm,PetscViewer v) 516 { 517 PetscErrorCode ierr; 518 PetscBool isbinary; 519 520 PetscFunctionBegin; 521 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 522 if (!v) { 523 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 524 } 525 ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 526 if (isbinary) { 527 PetscInt classid = DM_FILE_CLASSID; 528 MPI_Comm comm; 529 PetscMPIInt rank; 530 char type[256]; 531 532 ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr); 533 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 534 if (!rank) { 535 ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr); 536 ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr); 537 ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr); 538 } 539 } 540 if (dm->ops->view) { 541 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 542 } 543 PetscFunctionReturn(0); 544 } 545 546 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer); 547 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer); 548 549 #undef __FUNCT__ 550 #define __FUNCT__ "DMCreateGlobalVector" 551 /*@ 552 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 553 554 Collective on DM 555 556 Input Parameter: 557 . dm - the DM object 558 559 Output Parameter: 560 . 
vec - the global vector 561 562 Level: beginner 563 564 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 565 566 @*/ 567 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 568 { 569 PetscErrorCode ierr; 570 571 PetscFunctionBegin; 572 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 573 if (dm->defaultSection) { 574 PetscSection gSection; 575 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 576 577 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 578 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 579 for(p = pStart; p < pEnd; ++p) { 580 PetscInt dof, cdof; 581 582 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 583 ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr); 584 if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof; 585 if ((dof > 0) && (dof-cdof != blockSize)) { 586 blockSize = 1; 587 break; 588 } 589 } 590 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 591 if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize); 592 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 593 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 594 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 595 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 596 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 597 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 598 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 599 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 600 /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 601 ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr); 602 } else { 603 ierr = 
(*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 604 } 605 PetscFunctionReturn(0); 606 } 607 608 #undef __FUNCT__ 609 #define __FUNCT__ "DMCreateLocalVector" 610 /*@ 611 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 612 613 Not Collective 614 615 Input Parameter: 616 . dm - the DM object 617 618 Output Parameter: 619 . vec - the local vector 620 621 Level: beginner 622 623 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 624 625 @*/ 626 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 627 { 628 PetscErrorCode ierr; 629 630 PetscFunctionBegin; 631 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 632 if (dm->defaultSection) { 633 PetscInt localSize, blockSize = -1, pStart, pEnd, p; 634 635 ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr); 636 for(p = pStart; p < pEnd; ++p) { 637 PetscInt dof; 638 639 ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr); 640 if ((blockSize < 0) && (dof > 0)) blockSize = dof; 641 if ((dof > 0) && (dof != blockSize)) { 642 blockSize = 1; 643 break; 644 } 645 } 646 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 647 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 648 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 649 ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr); 650 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 651 ierr = VecSetDM(*vec, dm);CHKERRQ(ierr); 652 ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr); 653 } else { 654 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 655 } 656 PetscFunctionReturn(0); 657 } 658 659 #undef __FUNCT__ 660 #define __FUNCT__ "DMGetLocalToGlobalMapping" 661 /*@ 662 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 663 664 Collective on DM 665 666 Input Parameter: 667 . dm - the DM that provides the mapping 668 669 Output Parameter: 670 . 
ltog - the mapping 671 672 Level: intermediate 673 674 Notes: 675 This mapping can then be used by VecSetLocalToGlobalMapping() or 676 MatSetLocalToGlobalMapping(). 677 678 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 679 @*/ 680 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 681 { 682 PetscErrorCode ierr; 683 684 PetscFunctionBegin; 685 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 686 PetscValidPointer(ltog,2); 687 if (!dm->ltogmap) { 688 PetscSection section, sectionGlobal; 689 690 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 691 if (section) { 692 PetscInt *ltog; 693 PetscInt pStart, pEnd, size, p, l; 694 695 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 696 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 697 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 698 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 699 for (p = pStart, l = 0; p < pEnd; ++p) { 700 PetscInt dof, off, c; 701 702 /* Should probably use constrained dofs */ 703 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 704 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 705 for (c = 0; c < dof; ++c, ++l) { 706 ltog[l] = off+c; 707 } 708 } 709 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 710 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 711 } else { 712 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 713 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 714 } 715 } 716 *ltog = dm->ltogmap; 717 PetscFunctionReturn(0); 718 } 719 720 #undef __FUNCT__ 721 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 722 /*@ 723 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 
724 725 Collective on DM 726 727 Input Parameter: 728 . da - the distributed array that provides the mapping 729 730 Output Parameter: 731 . ltog - the block mapping 732 733 Level: intermediate 734 735 Notes: 736 This mapping can then be used by VecSetLocalToGlobalMappingBlock() or 737 MatSetLocalToGlobalMappingBlock(). 738 739 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize() 740 @*/ 741 PetscErrorCode DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog) 742 { 743 PetscErrorCode ierr; 744 745 PetscFunctionBegin; 746 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 747 PetscValidPointer(ltog,2); 748 if (!dm->ltogmapb) { 749 PetscInt bs; 750 ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr); 751 if (bs > 1) { 752 if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock"); 753 ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr); 754 } else { 755 ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr); 756 ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr); 757 } 758 } 759 *ltog = dm->ltogmapb; 760 PetscFunctionReturn(0); 761 } 762 763 #undef __FUNCT__ 764 #define __FUNCT__ "DMGetBlockSize" 765 /*@ 766 DMGetBlockSize - Gets the inherent block size associated with a DM 767 768 Not Collective 769 770 Input Parameter: 771 . dm - the DM with block structure 772 773 Output Parameter: 774 . 
bs - the block size, 1 implies no exploitable block structure 775 776 Level: intermediate 777 778 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock() 779 @*/ 780 PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs) 781 { 782 PetscFunctionBegin; 783 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 784 PetscValidPointer(bs,2); 785 if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet"); 786 *bs = dm->bs; 787 PetscFunctionReturn(0); 788 } 789 790 #undef __FUNCT__ 791 #define __FUNCT__ "DMCreateInterpolation" 792 /*@ 793 DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects 794 795 Collective on DM 796 797 Input Parameter: 798 + dm1 - the DM object 799 - dm2 - the second, finer DM object 800 801 Output Parameter: 802 + mat - the interpolation 803 - vec - the scaling (optional) 804 805 Level: developer 806 807 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 808 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation. 809 810 For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors 811 EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic. 
812 813 814 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen() 815 816 @*/ 817 PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec) 818 { 819 PetscErrorCode ierr; 820 821 PetscFunctionBegin; 822 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 823 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 824 ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr); 825 PetscFunctionReturn(0); 826 } 827 828 #undef __FUNCT__ 829 #define __FUNCT__ "DMCreateInjection" 830 /*@ 831 DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects 832 833 Collective on DM 834 835 Input Parameter: 836 + dm1 - the DM object 837 - dm2 - the second, finer DM object 838 839 Output Parameter: 840 . ctx - the injection 841 842 Level: developer 843 844 Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by 845 DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection. 846 847 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation() 848 849 @*/ 850 PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx) 851 { 852 PetscErrorCode ierr; 853 854 PetscFunctionBegin; 855 PetscValidHeaderSpecific(dm1,DM_CLASSID,1); 856 PetscValidHeaderSpecific(dm2,DM_CLASSID,2); 857 ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr); 858 PetscFunctionReturn(0); 859 } 860 861 #undef __FUNCT__ 862 #define __FUNCT__ "DMCreateColoring" 863 /*@C 864 DMCreateColoring - Gets coloring for a DMDA or DMComposite 865 866 Collective on DM 867 868 Input Parameter: 869 + dm - the DM object 870 . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL 871 - matype - either MATAIJ or MATBAIJ 872 873 Output Parameter: 874 . 
coloring - the coloring 875 876 Level: developer 877 878 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix() 879 880 @*/ 881 PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring) 882 { 883 PetscErrorCode ierr; 884 885 PetscFunctionBegin; 886 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 887 if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet"); 888 ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr); 889 PetscFunctionReturn(0); 890 } 891 892 #undef __FUNCT__ 893 #define __FUNCT__ "DMCreateMatrix" 894 /*@C 895 DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite 896 897 Collective on DM 898 899 Input Parameter: 900 + dm - the DM object 901 - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or 902 any type which inherits from one of these (such as MATAIJ) 903 904 Output Parameter: 905 . mat - the empty Jacobian 906 907 Level: beginner 908 909 Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 910 do not need to do it yourself. 911 912 By default it also sets the nonzero structure and puts in the zero entries. To prevent setting 913 the nonzero pattern call DMDASetMatPreallocateOnly() 914 915 For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used 916 internally by PETSc. 917 918 For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires 919 the indices for the global numbering for DMDAs which is complicated. 
920 921 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 922 923 @*/ 924 PetscErrorCode DMCreateMatrix(DM dm,MatType mtype,Mat *mat) 925 { 926 PetscErrorCode ierr; 927 928 PetscFunctionBegin; 929 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 930 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 931 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 932 #endif 933 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 934 PetscValidPointer(mat,3); 935 if (dm->mattype) { 936 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 937 } else { 938 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 939 } 940 PetscFunctionReturn(0); 941 } 942 943 #undef __FUNCT__ 944 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 945 /*@ 946 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 947 preallocated but the nonzero structure and zero values will not be set. 948 949 Logically Collective on DMDA 950 951 Input Parameter: 952 + dm - the DM 953 - only - PETSC_TRUE if only want preallocation 954 955 Level: developer 956 .seealso DMCreateMatrix() 957 @*/ 958 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 959 { 960 PetscFunctionBegin; 961 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 962 dm->prealloc_only = only; 963 PetscFunctionReturn(0); 964 } 965 966 #undef __FUNCT__ 967 #define __FUNCT__ "DMGetWorkArray" 968 /*@C 969 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 970 971 Not Collective 972 973 Input Parameters: 974 + dm - the DM object 975 . count - The minium size 976 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 977 978 Output Parameter: 979 . 
array - the work array 980 981 Level: developer 982 983 .seealso DMDestroy(), DMCreate() 984 @*/ 985 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 986 { 987 PetscErrorCode ierr; 988 DMWorkLink link; 989 size_t size; 990 991 PetscFunctionBegin; 992 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 993 PetscValidPointer(mem,4); 994 if (dm->workin) { 995 link = dm->workin; 996 dm->workin = dm->workin->next; 997 } else { 998 ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr); 999 } 1000 ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr); 1001 if (size*count > link->bytes) { 1002 ierr = PetscFree(link->mem);CHKERRQ(ierr); 1003 ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr); 1004 link->bytes = size*count; 1005 } 1006 link->next = dm->workout; 1007 dm->workout = link; 1008 *(void**)mem = link->mem; 1009 PetscFunctionReturn(0); 1010 } 1011 1012 #undef __FUNCT__ 1013 #define __FUNCT__ "DMRestoreWorkArray" 1014 /*@C 1015 DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 1016 1017 Not Collective 1018 1019 Input Parameters: 1020 + dm - the DM object 1021 . count - The minium size 1022 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 1023 1024 Output Parameter: 1025 . 
array - the work array 1026 1027 Level: developer 1028 1029 .seealso DMDestroy(), DMCreate() 1030 @*/ 1031 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem) 1032 { 1033 DMWorkLink *p,link; 1034 1035 PetscFunctionBegin; 1036 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1037 PetscValidPointer(mem,4); 1038 for (p=&dm->workout; (link=*p); p=&link->next) { 1039 if (link->mem == *(void**)mem) { 1040 *p = link->next; 1041 link->next = dm->workin; 1042 dm->workin = link; 1043 *(void**)mem = PETSC_NULL; 1044 PetscFunctionReturn(0); 1045 } 1046 } 1047 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out"); 1048 PetscFunctionReturn(0); 1049 } 1050 1051 #undef __FUNCT__ 1052 #define __FUNCT__ "DMSetNullSpaceConstructor" 1053 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace)) 1054 { 1055 PetscFunctionBegin; 1056 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 1057 if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field); 1058 dm->nullspaceConstructors[field] = nullsp; 1059 PetscFunctionReturn(0); 1060 } 1061 1062 #undef __FUNCT__ 1063 #define __FUNCT__ "DMCreateFieldIS" 1064 /*@C 1065 DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field 1066 1067 Not collective 1068 1069 Input Parameter: 1070 . dm - the DM object 1071 1072 Output Parameters: 1073 + numFields - The number of fields (or PETSC_NULL if not requested) 1074 . fieldNames - The name for each field (or PETSC_NULL if not requested) 1075 - fields - The global indices for each field (or PETSC_NULL if not requested) 1076 1077 Level: intermediate 1078 1079 Notes: 1080 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so it is well defined on all exit paths */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* A default PetscSection is attached: derive the fields from it */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count unconstrained dofs of each field on locally-owned points (gdof > 0) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;   /* reset; reused as a fill cursor in the second pass */
    }
    /* Second pass: record global indices, advancing the point's global offset in field order */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* the IS takes ownership of fieldIndices[f] (PETSC_OWN_POINTER), so it is not freed below */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No section: fall back to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldDecompositionDM"
/*@C
  DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.

  Not Collective

  Input Parameters:
+ dm - the DM object
- name - the name of the field decomposition

  Output Parameter:
. 
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1188 1189 Level: advanced 1190 1191 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1192 @*/ 1193 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1194 { 1195 PetscErrorCode ierr; 1196 1197 PetscFunctionBegin; 1198 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1199 PetscValidCharPointer(name,2); 1200 PetscValidPointer(ddm,3); 1201 *ddm = PETSC_NULL; 1202 if (dm->ops->createfielddecompositiondm) { 1203 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1204 } 1205 PetscFunctionReturn(0); 1206 } 1207 1208 #undef __FUNCT__ 1209 #define __FUNCT__ "DMCreateFieldDecomposition" 1210 /*@C 1211 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1212 corresponding to different fields: each IS contains the global indices of the dofs of the 1213 corresponding field. The optional list of DMs define the DM for each subproblem. 1214 Generalizes DMCreateFieldIS(). 1215 1216 Not collective 1217 1218 Input Parameter: 1219 . dm - the DM object 1220 1221 Output Parameters: 1222 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1223 . namelist - The name for each field (or PETSC_NULL if not requested) 1224 . islist - The global indices for each field (or PETSC_NULL if not requested) 1225 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1226 1227 Level: intermediate 1228 1229 Notes: 1230 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1231 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1232 and all of the arrays should be freed with PetscFree(). 
1233 1234 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1235 @*/ 1236 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1237 { 1238 PetscErrorCode ierr; 1239 1240 PetscFunctionBegin; 1241 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1242 if (len) {PetscValidPointer(len,2); *len = 0;} 1243 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1244 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1245 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1246 if (!dm->ops->createfielddecomposition) { 1247 PetscSection section; 1248 PetscInt numFields, f; 1249 1250 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1251 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1252 if (section && numFields && dm->ops->createsubdm) { 1253 *len = numFields; 1254 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1255 for (f = 0; f < numFields; ++f) { 1256 const char *fieldName; 1257 1258 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1259 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1260 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1261 } 1262 } else { 1263 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1264 /* By default there are no DMs associated with subproblems. */ 1265 if (dmlist) *dmlist = PETSC_NULL; 1266 } 1267 } 1268 else { 1269 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1270 } 1271 PetscFunctionReturn(0); 1272 } 1273 1274 #undef __FUNCT__ 1275 #define __FUNCT__ "DMCreateSubDM" 1276 /*@C 1277 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1278 The fields are defined by DMCreateFieldIS(). 
1279 1280 Not collective 1281 1282 Input Parameters: 1283 + dm - the DM object 1284 . numFields - number of fields in this subproblem 1285 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1286 1287 Output Parameters: 1288 . is - The global indices for the subproblem 1289 - dm - The DM for the subproblem 1290 1291 Level: intermediate 1292 1293 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1294 @*/ 1295 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1296 { 1297 PetscErrorCode ierr; 1298 1299 PetscFunctionBegin; 1300 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1301 PetscValidPointer(fields,3); 1302 if (is) {PetscValidPointer(is,4);} 1303 if (subdm) {PetscValidPointer(subdm,5);} 1304 if (dm->ops->createsubdm) { 1305 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1306 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1307 PetscFunctionReturn(0); 1308 } 1309 1310 #undef __FUNCT__ 1311 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1312 /*@C 1313 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1314 1315 Not Collective 1316 1317 Input Parameters: 1318 + dm - the DM object 1319 - name - the name of the subdomain decomposition 1320 1321 Output Parameter: 1322 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1323 1324 Level: advanced 1325 1326 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1327 @*/ 1328 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1329 { 1330 PetscErrorCode ierr; 1331 1332 PetscFunctionBegin; 1333 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1334 PetscValidCharPointer(name,2); 1335 PetscValidPointer(ddm,3); 1336 *ddm = PETSC_NULL; 1337 if (dm->ops->createdomaindecompositiondm) { 1338 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1339 } 1340 PetscFunctionReturn(0); 1341 } 1342 1343 #undef __FUNCT__ 1344 #define __FUNCT__ "DMCreateDomainDecomposition" 1345 /*@C 1346 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1347 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1348 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1349 define a nonoverlapping covering, while outer subdomains can overlap. 1350 The optional list of DMs define the DM for each subproblem. 1351 1352 Not collective 1353 1354 Input Parameter: 1355 . dm - the DM object 1356 1357 Output Parameters: 1358 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1359 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1360 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1361 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1362 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1363 1364 Level: intermediate 1365 1366 Notes: 1367 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1368 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1369 and all of the arrays should be freed with PetscFree(). 1370 1371 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1372 @*/ 1373 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1374 { 1375 PetscErrorCode ierr; 1376 1377 PetscFunctionBegin; 1378 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1379 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1380 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1381 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1382 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1383 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1384 if (dm->ops->createdomaindecomposition) { 1385 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1386 } 1387 PetscFunctionReturn(0); 1388 } 1389 1390 #undef __FUNCT__ 1391 #define __FUNCT__ "DMRefine" 1392 /*@ 1393 DMRefine - Refines a DM object 1394 1395 Collective on DM 1396 1397 Input Parameter: 1398 + dm - the DM object 1399 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1400 1401 Output Parameter: 1402 . 
dmf - the refined DM, or PETSC_NULL 1403 1404 Note: If no refinement was done, the return value is PETSC_NULL 1405 1406 Level: developer 1407 1408 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1409 @*/ 1410 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1411 { 1412 PetscErrorCode ierr; 1413 DMRefineHookLink link; 1414 1415 PetscFunctionBegin; 1416 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1417 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1418 if (*dmf) { 1419 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1420 (*dmf)->ops->initialguess = dm->ops->initialguess; 1421 (*dmf)->ops->function = dm->ops->function; 1422 (*dmf)->ops->functionj = dm->ops->functionj; 1423 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1424 (*dmf)->ops->jacobian = dm->ops->jacobian; 1425 } 1426 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1427 (*dmf)->ctx = dm->ctx; 1428 (*dmf)->leveldown = dm->leveldown; 1429 (*dmf)->levelup = dm->levelup + 1; 1430 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1431 for (link=dm->refinehook; link; link=link->next) { 1432 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1433 } 1434 } 1435 PetscFunctionReturn(0); 1436 } 1437 1438 #undef __FUNCT__ 1439 #define __FUNCT__ "DMRefineHookAdd" 1440 /*@ 1441 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1442 1443 Logically Collective 1444 1445 Input Arguments: 1446 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1447 . refinehook - function to run when setting up a coarser level 1448 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1449 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1450 1451 Calling sequence of refinehook: 1452 $ refinehook(DM coarse,DM fine,void *ctx); 1453 1454 + coarse - coarse level DM 1455 . fine - fine level DM to interpolate problem to 1456 - ctx - optional user-defined function context 1457 1458 Calling sequence for interphook: 1459 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1460 1461 + coarse - coarse level DM 1462 . interp - matrix interpolating a coarse-level solution to the finer grid 1463 . fine - fine level DM to update 1464 - ctx - optional user-defined function context 1465 1466 Level: advanced 1467 1468 Notes: 1469 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1470 1471 If this function is called multiple times, the hooks will be run in the order they are added. 1472 1473 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1474 @*/ 1475 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1476 { 1477 PetscErrorCode ierr; 1478 DMRefineHookLink link,*p; 1479 1480 PetscFunctionBegin; 1481 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1482 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1483 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1484 link->refinehook = refinehook; 1485 link->interphook = interphook; 1486 link->ctx = ctx; 1487 link->next = PETSC_NULL; 1488 *p = link; 1489 PetscFunctionReturn(0); 1490 } 1491 1492 #undef __FUNCT__ 1493 #define __FUNCT__ "DMInterpolate" 1494 /*@ 1495 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1496 1497 Collective if any 
hooks are 1498 1499 Input Arguments: 1500 + coarse - coarser DM to use as a base 1501 . restrct - interpolation matrix, apply using MatInterpolate() 1502 - fine - finer DM to update 1503 1504 Level: developer 1505 1506 .seealso: DMRefineHookAdd(), MatInterpolate() 1507 @*/ 1508 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1509 { 1510 PetscErrorCode ierr; 1511 DMRefineHookLink link; 1512 1513 PetscFunctionBegin; 1514 for (link=fine->refinehook; link; link=link->next) { 1515 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1516 } 1517 PetscFunctionReturn(0); 1518 } 1519 1520 #undef __FUNCT__ 1521 #define __FUNCT__ "DMGetRefineLevel" 1522 /*@ 1523 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1524 1525 Not Collective 1526 1527 Input Parameter: 1528 . dm - the DM object 1529 1530 Output Parameter: 1531 . level - number of refinements 1532 1533 Level: developer 1534 1535 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1536 1537 @*/ 1538 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1539 { 1540 PetscFunctionBegin; 1541 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1542 *level = dm->levelup; 1543 PetscFunctionReturn(0); 1544 } 1545 1546 #undef __FUNCT__ 1547 #define __FUNCT__ "DMGlobalToLocalBegin" 1548 /*@ 1549 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1550 1551 Neighbor-wise Collective on DM 1552 1553 Input Parameters: 1554 + dm - the DM object 1555 . g - the global vector 1556 . 
mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: broadcast global values to local ghosted storage */
    PetscScalar *lArray, *gArray;

    /* ADD_VALUES has no meaning for a global->local broadcast */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before PetscSFBcastEnd() completes the
       communication in DMGlobalToLocalEnd() -- presumably safe because the SF holds
       the raw pointers; TODO confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* type-specific path; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
. 
mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  PetscScalar    *lArray, *gArray;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: complete the broadcast started in DMGlobalToLocalBegin() */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* type-specific path; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local
    array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
    global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: reduce local (ghosted) values onto their global owners */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* map the InsertMode onto the MPI reduction operation */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before PetscSFReduceEnd() completes in
       DMLocalToGlobalEnd() -- presumably safe because the SF holds the raw pointers;
       TODO confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* type-specific path; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: complete the reduction started in DMLocalToGlobalBegin() */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* the mode must be re-mapped here exactly as in DMLocalToGlobalBegin() */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* type-specific path; *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - set to SAME_NONZERO_PATTERN

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* finite-difference Jacobian via the coloring stored on the DM */
  ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
    DMCoarsen - Coarsens a DM object

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
. 
dmc - the coarsened DM 1793 1794 Level: developer 1795 1796 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1797 1798 @*/ 1799 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1800 { 1801 PetscErrorCode ierr; 1802 DMCoarsenHookLink link; 1803 1804 PetscFunctionBegin; 1805 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1806 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1807 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1808 (*dmc)->ops->initialguess = dm->ops->initialguess; 1809 (*dmc)->ops->function = dm->ops->function; 1810 (*dmc)->ops->functionj = dm->ops->functionj; 1811 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1812 (*dmc)->ops->jacobian = dm->ops->jacobian; 1813 } 1814 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1815 (*dmc)->ctx = dm->ctx; 1816 (*dmc)->levelup = dm->levelup; 1817 (*dmc)->leveldown = dm->leveldown + 1; 1818 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1819 for (link=dm->coarsenhook; link; link=link->next) { 1820 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1821 } 1822 PetscFunctionReturn(0); 1823 } 1824 1825 #undef __FUNCT__ 1826 #define __FUNCT__ "DMCoarsenHookAdd" 1827 /*@ 1828 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1829 1830 Logically Collective 1831 1832 Input Arguments: 1833 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1834 . coarsenhook - function to run when setting up a coarser level 1835 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1836 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1837 1838 Calling sequence of coarsenhook: 1839 $ coarsenhook(DM fine,DM coarse,void *ctx); 1840 1841 + fine - fine level DM 1842 . 
coarse - coarse level DM to restrict problem to 1843 - ctx - optional user-defined function context 1844 1845 Calling sequence for restricthook: 1846 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1847 1848 + fine - fine level DM 1849 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1850 . rscale - scaling vector for restriction 1851 . inject - matrix restricting by injection 1852 . coarse - coarse level DM to update 1853 - ctx - optional user-defined function context 1854 1855 Level: advanced 1856 1857 Notes: 1858 This function is only needed if auxiliary data needs to be set up on coarse grids. 1859 1860 If this function is called multiple times, the hooks will be run in the order they are added. 1861 1862 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1863 extract the finest level information from its context (instead of from the SNES). 1864 1865 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1866 @*/ 1867 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1868 { 1869 PetscErrorCode ierr; 1870 DMCoarsenHookLink link,*p; 1871 1872 PetscFunctionBegin; 1873 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1874 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1875 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1876 link->coarsenhook = coarsenhook; 1877 link->restricthook = restricthook; 1878 link->ctx = ctx; 1879 link->next = PETSC_NULL; 1880 *p = link; 1881 PetscFunctionReturn(0); 1882 } 1883 1884 #undef __FUNCT__ 1885 #define __FUNCT__ "DMRestrict" 1886 /*@ 1887 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1888 1889 Collective 
if any hooks are 1890 1891 Input Arguments: 1892 + fine - finer DM to use as a base 1893 . restrct - restriction matrix, apply using MatRestrict() 1894 . inject - injection matrix, also use MatRestrict() 1895 - coarse - coarer DM to update 1896 1897 Level: developer 1898 1899 .seealso: DMCoarsenHookAdd(), MatRestrict() 1900 @*/ 1901 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1902 { 1903 PetscErrorCode ierr; 1904 DMCoarsenHookLink link; 1905 1906 PetscFunctionBegin; 1907 for (link=fine->coarsenhook; link; link=link->next) { 1908 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1909 } 1910 PetscFunctionReturn(0); 1911 } 1912 1913 #undef __FUNCT__ 1914 #define __FUNCT__ "DMBlockRestrictHookAdd" 1915 /*@ 1916 DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1917 1918 Logically Collective 1919 1920 Input Arguments: 1921 + global - global DM 1922 . restricthook - function to run to update data on block solve (at the beginning of the block solve) 1923 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1924 1925 Calling sequence for restricthook: 1926 $ restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx) 1927 1928 + global - global DM 1929 . out - scatter to the outer (with ghost and overlap points) block vector 1930 . in - scatter to block vector values only owned locally 1931 . block - block DM (may just be a shell if the global DM is passed in correctly) 1932 - ctx - optional user-defined function context 1933 1934 Level: advanced 1935 1936 Notes: 1937 This function is only needed if auxiliary data needs to be set up on coarse grids. 1938 1939 If this function is called multiple times, the hooks will be run in the order they are added. 
1940 1941 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1942 extract the finest level information from its context (instead of from the SNES). 1943 1944 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1945 @*/ 1946 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx) 1947 { 1948 PetscErrorCode ierr; 1949 DMBlockRestrictHookLink link,*p; 1950 1951 PetscFunctionBegin; 1952 PetscValidHeaderSpecific(global,DM_CLASSID,1); 1953 for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1954 ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr); 1955 link->restricthook = restricthook; 1956 link->ctx = ctx; 1957 link->next = PETSC_NULL; 1958 *p = link; 1959 PetscFunctionReturn(0); 1960 } 1961 1962 #undef __FUNCT__ 1963 #define __FUNCT__ "DMBlockRestrict" 1964 /*@ 1965 DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd() 1966 1967 Collective if any hooks are 1968 1969 Input Arguments: 1970 + fine - finer DM to use as a base 1971 . restrct - restriction matrix, apply using MatRestrict() 1972 . 
inject - injection matrix, also use MatRestrict() 1973 - coarse - coarer DM to update 1974 1975 Level: developer 1976 1977 .seealso: DMCoarsenHookAdd(), MatRestrict() 1978 @*/ 1979 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block) 1980 { 1981 PetscErrorCode ierr; 1982 DMBlockRestrictHookLink link; 1983 1984 PetscFunctionBegin; 1985 for (link=global->blockrestricthook; link; link=link->next) { 1986 if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);} 1987 } 1988 PetscFunctionReturn(0); 1989 } 1990 1991 #undef __FUNCT__ 1992 #define __FUNCT__ "DMGetCoarsenLevel" 1993 /*@ 1994 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1995 1996 Not Collective 1997 1998 Input Parameter: 1999 . dm - the DM object 2000 2001 Output Parameter: 2002 . level - number of coarsenings 2003 2004 Level: developer 2005 2006 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2007 2008 @*/ 2009 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 2010 { 2011 PetscFunctionBegin; 2012 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2013 *level = dm->leveldown; 2014 PetscFunctionReturn(0); 2015 } 2016 2017 2018 2019 #undef __FUNCT__ 2020 #define __FUNCT__ "DMRefineHierarchy" 2021 /*@C 2022 DMRefineHierarchy - Refines a DM object, all levels at once 2023 2024 Collective on DM 2025 2026 Input Parameter: 2027 + dm - the DM object 2028 - nlevels - the number of levels of refinement 2029 2030 Output Parameter: 2031 . 
dmf - the refined DM hierarchy 2032 2033 Level: developer 2034 2035 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 2036 2037 @*/ 2038 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 2039 { 2040 PetscErrorCode ierr; 2041 2042 PetscFunctionBegin; 2043 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2044 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 2045 if (nlevels == 0) PetscFunctionReturn(0); 2046 if (dm->ops->refinehierarchy) { 2047 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 2048 } else if (dm->ops->refine) { 2049 PetscInt i; 2050 2051 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 2052 for (i=1; i<nlevels; i++) { 2053 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 2054 } 2055 } else { 2056 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 2057 } 2058 PetscFunctionReturn(0); 2059 } 2060 2061 #undef __FUNCT__ 2062 #define __FUNCT__ "DMCoarsenHierarchy" 2063 /*@C 2064 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 2065 2066 Collective on DM 2067 2068 Input Parameter: 2069 + dm - the DM object 2070 - nlevels - the number of levels of coarsening 2071 2072 Output Parameter: 2073 . 
 dmc - the coarsened DM hierarchy

   Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    PetscInt i;

    /* fall back to repeated single-level coarsening */
    ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
    }
  } else {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.
rest - the restriction matrix (transpose of the projection matrix) 2118 2119 Level: intermediate 2120 2121 .keywords: interpolation, restriction, multigrid 2122 2123 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 2124 @*/ 2125 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 2126 { 2127 PetscErrorCode ierr; 2128 2129 PetscFunctionBegin; 2130 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 2131 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 2132 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 2133 PetscFunctionReturn(0); 2134 } 2135 2136 #undef __FUNCT__ 2137 #define __FUNCT__ "DMSetApplicationContextDestroy" 2138 /*@C 2139 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 2140 2141 Not Collective 2142 2143 Input Parameters: 2144 + dm - the DM object 2145 - destroy - the destroy function 2146 2147 Level: intermediate 2148 2149 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2150 2151 @*/ 2152 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 2153 { 2154 PetscFunctionBegin; 2155 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2156 dm->ctxdestroy = destroy; 2157 PetscFunctionReturn(0); 2158 } 2159 2160 #undef __FUNCT__ 2161 #define __FUNCT__ "DMSetApplicationContext" 2162 /*@ 2163 DMSetApplicationContext - Set a user context into a DM object 2164 2165 Not Collective 2166 2167 Input Parameters: 2168 + dm - the DM object 2169 - ctx - the user context 2170 2171 Level: intermediate 2172 2173 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 2174 2175 @*/ 2176 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 2177 { 2178 PetscFunctionBegin; 2179 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2180 dm->ctx = ctx; 2181 PetscFunctionReturn(0); 2182 } 2183 

#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
   DMGetApplicationContext - Gets a user context from a DM object

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  ctx - the user context (pass the address of a pointer)

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is treated as void** so callers can pass the address of any pointer type */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
   DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

   Logically Collective on DM

   Input Parameter:
+  dm - the DM object
-  f - the function to compute the initial guess

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
   DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

   Logically Collective on DM

   Input Parameter:
+  dm - the DM object
-  f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

   Level: intermediate

   Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite
   differences if no Jacobian computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  /* functionj (used for finite-difference Jacobians) is only set, never cleared here */
  if (f) {
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
   DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

   Logically Collective on DM

   Input Parameter:
+  dm - the DM object
-  f - the function to compute the matrix entries

   Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVariableBounds"
/*@C
   DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2294 2295 Logically Collective on DM 2296 2297 Input Parameter: 2298 + dm - the DM object 2299 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2300 2301 Level: intermediate 2302 2303 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2304 DMSetJacobian() 2305 2306 @*/ 2307 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2308 { 2309 PetscFunctionBegin; 2310 dm->ops->computevariablebounds = f; 2311 PetscFunctionReturn(0); 2312 } 2313 2314 #undef __FUNCT__ 2315 #define __FUNCT__ "DMHasVariableBounds" 2316 /*@ 2317 DMHasVariableBounds - does the DM object have a variable bounds function? 2318 2319 Not Collective 2320 2321 Input Parameter: 2322 . dm - the DM object to destroy 2323 2324 Output Parameter: 2325 . flg - PETSC_TRUE if the variable bounds function exists 2326 2327 Level: developer 2328 2329 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2330 2331 @*/ 2332 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2333 { 2334 PetscFunctionBegin; 2335 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2336 PetscFunctionReturn(0); 2337 } 2338 2339 #undef __FUNCT__ 2340 #define __FUNCT__ "DMComputeVariableBounds" 2341 /*@C 2342 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2343 2344 Logically Collective on DM 2345 2346 Input Parameters: 2347 + dm - the DM object to destroy 2348 - x - current solution at which the bounds are computed 2349 2350 Output parameters: 2351 + xl - lower bound 2352 - xu - upper bound 2353 2354 Level: intermediate 2355 2356 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2357 DMSetFunction(), DMSetVariableBounds() 2358 2359 @*/ 2360 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2361 { 2362 PetscErrorCode ierr; 2363 PetscFunctionBegin; 2364 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2365 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2366 if (dm->ops->computevariablebounds) { 2367 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2368 } 2369 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2370 PetscFunctionReturn(0); 2371 } 2372 2373 #undef __FUNCT__ 2374 #define __FUNCT__ "DMComputeInitialGuess" 2375 /*@ 2376 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2377 2378 Collective on DM 2379 2380 Input Parameter: 2381 + dm - the DM object to destroy 2382 - x - the vector to hold the initial guess values 2383 2384 Level: developer 2385 2386 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2387 2388 @*/ 2389 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2390 { 2391 PetscErrorCode ierr; 2392 PetscFunctionBegin; 2393 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2394 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2395 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2396 PetscFunctionReturn(0); 2397 } 2398 2399 #undef __FUNCT__ 2400 #define __FUNCT__ "DMHasInitialGuess" 2401 /*@ 2402 
   DMHasInitialGuess - does the DM object have an initial guess function

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  flg - PETSC_TRUE if function exists

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasInitialGuess(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
   DMHasFunction - does the DM object have a function

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.  flg - PETSC_TRUE if function exists

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode DMHasFunction(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
   DMHasJacobian - does the DM object have a matrix function

   Not Collective

   Input Parameter:
.  dm - the DM object

   Output Parameter:
.
flg - PETSC_TRUE if function exists 2463 2464 Level: developer 2465 2466 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2467 2468 @*/ 2469 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2470 { 2471 PetscFunctionBegin; 2472 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2473 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2474 PetscFunctionReturn(0); 2475 } 2476 2477 #undef __FUNCT__ 2478 #define __FUNCT__ "DMHasColoring" 2479 /*@ 2480 DMHasColoring - does the DM object have a method of providing a coloring? 2481 2482 Not Collective 2483 2484 Input Parameter: 2485 . dm - the DM object 2486 2487 Output Parameter: 2488 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2489 2490 Level: developer 2491 2492 .seealso DMHasFunction(), DMCreateColoring() 2493 2494 @*/ 2495 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2496 { 2497 PetscFunctionBegin; 2498 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2499 PetscFunctionReturn(0); 2500 } 2501 2502 #undef __FUNCT__ 2503 #define __FUNCT__ "DMSetVec" 2504 /*@C 2505 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2506 2507 Collective on DM 2508 2509 Input Parameter: 2510 + dm - the DM object 2511 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2512 2513 Level: developer 2514 2515 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2516 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2517 2518 @*/ 2519 PetscErrorCode DMSetVec(DM dm,Vec x) 2520 { 2521 PetscErrorCode ierr; 2522 PetscFunctionBegin; 2523 if (x) { 2524 if (!dm->x) { 2525 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2526 } 2527 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2528 } 2529 else if (dm->x) { 2530 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2531 } 2532 PetscFunctionReturn(0); 2533 } 2534 2535 2536 #undef __FUNCT__ 2537 #define __FUNCT__ "DMComputeFunction" 2538 /*@ 2539 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2540 2541 Collective on DM 2542 2543 Input Parameter: 2544 + dm - the DM object to destroy 2545 . x - the location where the function is evaluationed, may be ignored for linear problems 2546 - b - the vector to hold the right hand side entries 2547 2548 Level: developer 2549 2550 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2551 DMSetJacobian() 2552 2553 @*/ 2554 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2555 { 2556 PetscErrorCode ierr; 2557 PetscFunctionBegin; 2558 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2559 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2560 PetscStackPush("DM user function"); 2561 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2562 PetscStackPop; 2563 PetscFunctionReturn(0); 2564 } 2565 2566 2567 2568 #undef __FUNCT__ 2569 #define __FUNCT__ "DMComputeJacobian" 2570 /*@ 2571 DMComputeJacobian - compute the matrix entries for the solver 2572 2573 Collective on DM 2574 2575 Input Parameter: 2576 + dm - the DM object 2577 . 
 x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.  A - matrix that defines the operator for the linear solve
-  B - the matrix used to construct the preconditioner

   Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian: build a finite-difference coloring-based Jacobian from
       the function set with DMSetFunction() (stored in dm->ops->functionj). */
    ISColoring    coloring;
    MatFDColoring fd;
    MatType       mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    /* cache the FD context and install the default Jacobian for subsequent calls */
    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed -- NOTE(review): presumably breaks a reference
       cycle created by MatFDColoringSetFunction holding dm; confirm before removing */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* Registry of DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* no-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down the previous implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
2715 2716 Collective on DM 2717 2718 Input Parameters: 2719 + dm - the DM 2720 - newtype - new DM type (use "same" for the same type) 2721 2722 Output Parameter: 2723 . M - pointer to new DM 2724 2725 Notes: 2726 Cannot be used to convert a sequential DM to parallel or parallel to sequential, 2727 the MPI communicator of the generated DM is always the same as the communicator 2728 of the input DM. 2729 2730 Level: intermediate 2731 2732 .seealso: DMCreate() 2733 @*/ 2734 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M) 2735 { 2736 DM B; 2737 char convname[256]; 2738 PetscBool sametype, issame; 2739 PetscErrorCode ierr; 2740 2741 PetscFunctionBegin; 2742 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2743 PetscValidType(dm,1); 2744 PetscValidPointer(M,3); 2745 ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr); 2746 ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr); 2747 { 2748 PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL; 2749 2750 /* 2751 Order of precedence: 2752 1) See if a specialized converter is known to the current DM. 2753 2) See if a specialized converter is known to the desired DM class. 2754 3) See if a good general converter is registered for the desired class 2755 4) See if a good general converter is known for the current matrix. 2756 5) Use a really basic converter. 2757 */ 2758 2759 /* 1) See if a specialized converter is known to the current DM and the desired class */ 2760 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2761 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2762 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2763 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2764 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2765 ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2766 if (conv) goto foundconv; 2767 2768 /* 2) See if a specialized converter is known to the desired DM class. 
*/ 2769 ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr); 2770 ierr = DMSetType(B, newtype);CHKERRQ(ierr); 2771 ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr); 2772 ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr); 2773 ierr = PetscStrcat(convname,"_");CHKERRQ(ierr); 2774 ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr); 2775 ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr); 2776 ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr); 2777 if (conv) { 2778 ierr = DMDestroy(&B);CHKERRQ(ierr); 2779 goto foundconv; 2780 } 2781 2782 #if 0 2783 /* 3) See if a good general converter is registered for the desired class */ 2784 conv = B->ops->convertfrom; 2785 ierr = DMDestroy(&B);CHKERRQ(ierr); 2786 if (conv) goto foundconv; 2787 2788 /* 4) See if a good general converter is known for the current matrix */ 2789 if (dm->ops->convert) { 2790 conv = dm->ops->convert; 2791 } 2792 if (conv) goto foundconv; 2793 #endif 2794 2795 /* 5) Use a really basic converter. 
*/ 2796 SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype); 2797 2798 foundconv: 2799 ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2800 ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr); 2801 ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr); 2802 } 2803 ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr); 2804 PetscFunctionReturn(0); 2805 } 2806 2807 /*--------------------------------------------------------------------------------------------------------------------*/ 2808 2809 #undef __FUNCT__ 2810 #define __FUNCT__ "DMRegister" 2811 /*@C 2812 DMRegister - See DMRegisterDynamic() 2813 2814 Level: advanced 2815 @*/ 2816 PetscErrorCode DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM)) 2817 { 2818 char fullname[PETSC_MAX_PATH_LEN]; 2819 PetscErrorCode ierr; 2820 2821 PetscFunctionBegin; 2822 ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr); 2823 ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr); 2824 ierr = PetscStrcat(fullname, name);CHKERRQ(ierr); 2825 ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr); 2826 PetscFunctionReturn(0); 2827 } 2828 2829 2830 /*--------------------------------------------------------------------------------------------------------------------*/ 2831 #undef __FUNCT__ 2832 #define __FUNCT__ "DMRegisterDestroy" 2833 /*@C 2834 DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic(). 

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* allow DMRegisterAll() to repopulate the list later */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
#include <mex.h>

/* Holds the MATLAB function names (and optional context) used by the MATLAB callbacks below */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;

#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
                              DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;
  int             nlhs = 1,nrhs = 4;
  mxArray         *plhs[1],*prhs[4];
  long long int   lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* smuggle the PETSc object pointers to MATLAB as integers packed into doubles;
     memcpy avoids aliasing issues when widening pointer bits into long long */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] = mxCreateDoubleScalar((double)ls);
  prhs[1] = mxCreateDoubleScalar((double)lx);
  prhs[2] = mxCreateDoubleScalar((double)ly);
  prhs[3] = mxCreateString(sctx->funcname);
  ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* plhs[0] carries the MATLAB-side error code back through CHKERRQ */
  ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine

*/
PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode  ierr;
  DMMatlabContext *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
                              DMSetJacobianMatlab().
2930 2931 For linear problems x is null 2932 2933 .seealso: DMSetFunction(), DMGetFunction() 2934 */ 2935 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2936 { 2937 PetscErrorCode ierr; 2938 DMMatlabContext *sctx; 2939 int nlhs = 2,nrhs = 5; 2940 mxArray *plhs[2],*prhs[5]; 2941 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2942 2943 PetscFunctionBegin; 2944 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2945 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2946 2947 /* call MATLAB function in ctx with arguments x, A, and B */ 2948 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2949 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2950 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2951 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2952 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2953 prhs[0] = mxCreateDoubleScalar((double)ls); 2954 prhs[1] = mxCreateDoubleScalar((double)lx); 2955 prhs[2] = mxCreateDoubleScalar((double)lA); 2956 prhs[3] = mxCreateDoubleScalar((double)lB); 2957 prhs[4] = mxCreateString(sctx->jacname); 2958 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2959 *str = (MatStructure) mxGetScalar(plhs[0]); 2960 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2961 mxDestroyArray(prhs[0]); 2962 mxDestroyArray(prhs[1]); 2963 mxDestroyArray(prhs[2]); 2964 mxDestroyArray(prhs[3]); 2965 mxDestroyArray(prhs[4]); 2966 mxDestroyArray(plhs[0]); 2967 mxDestroyArray(plhs[1]); 2968 PetscFunctionReturn(0); 2969 } 2970 2971 2972 #undef __FUNCT__ 2973 #define __FUNCT__ "DMSetJacobianMatlab" 2974 /* 2975 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2976 2977 */ 2978 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2979 { 2980 PetscErrorCode ierr; 2981 DMMatlabContext *sctx; 2982 2983 PetscFunctionBegin; 2984 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2985 /* currently sctx is memory bleed */ 2986 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2987 if (!sctx) { 2988 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2989 } 2990 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2991 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2992 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2993 PetscFunctionReturn(0); 2994 } 2995 #endif 2996 2997 #undef __FUNCT__ 2998 #define __FUNCT__ "DMLoad" 2999 /*@C 3000 DMLoad - Loads a DM that has been stored in binary with DMView(). 3001 3002 Collective on PetscViewer 3003 3004 Input Parameters: 3005 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 3006 some related function before a call to DMLoad(). 3007 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 3008 HDF5 file viewer, obtained from PetscViewerHDF5Open() 3009 3010 Level: intermediate 3011 3012 Notes: 3013 The type is determined by the data in the file, any type set into the DM before this call is ignored. 3014 3015 Notes for advanced users: 3016 Most users should not need to know the details of the binary storage 3017 format, since DMLoad() and DMView() completely hide these details. 
3018 But for anyone who's interested, the standard binary matrix storage 3019 format is 3020 .vb 3021 has not yet been determined 3022 .ve 3023 3024 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 3025 @*/ 3026 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 3027 { 3028 PetscErrorCode ierr; 3029 PetscBool isbinary; 3030 PetscInt classid; 3031 char type[256]; 3032 3033 PetscFunctionBegin; 3034 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 3035 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 3036 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 3037 if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()"); 3038 3039 ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr); 3040 if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file"); 3041 ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr); 3042 ierr = DMSetType(newdm, type);CHKERRQ(ierr); 3043 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 3044 PetscFunctionReturn(0); 3045 } 3046 3047 /******************************** FEM Support **********************************/ 3048 3049 #undef __FUNCT__ 3050 #define __FUNCT__ "DMPrintCellVector" 3051 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 3052 PetscInt f; 3053 PetscErrorCode ierr; 3054 3055 PetscFunctionBegin; 3056 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3057 for (f = 0; f < len; ++f) { 3058 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 3059 } 3060 PetscFunctionReturn(0); 3061 } 3062 3063 #undef __FUNCT__ 3064 #define __FUNCT__ "DMPrintCellMatrix" 3065 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 3066 PetscInt f, g; 3067 
PetscErrorCode ierr; 3068 3069 PetscFunctionBegin; 3070 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 3071 for (f = 0; f < rows; ++f) { 3072 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 3073 for (g = 0; g < cols; ++g) { 3074 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 3075 } 3076 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 3077 } 3078 PetscFunctionReturn(0); 3079 } 3080 3081 #undef __FUNCT__ 3082 #define __FUNCT__ "DMGetLocalFunction" 3083 /*@C 3084 DMGetLocalFunction - Get the local residual function from this DM 3085 3086 Not collective 3087 3088 Input Parameter: 3089 . dm - The DM 3090 3091 Output Parameter: 3092 . lf - The local residual function 3093 3094 Calling sequence of lf: 3095 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3096 3097 + snes - the SNES context 3098 . x - local vector with the state at which to evaluate residual 3099 . f - local vector to put residual in 3100 - ctx - optional user-defined function context 3101 3102 Level: intermediate 3103 3104 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian() 3105 @*/ 3106 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *)) 3107 { 3108 PetscFunctionBegin; 3109 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3110 if (lf) *lf = dm->lf; 3111 PetscFunctionReturn(0); 3112 } 3113 3114 #undef __FUNCT__ 3115 #define __FUNCT__ "DMSetLocalFunction" 3116 /*@C 3117 DMSetLocalFunction - Set the local residual function from this DM 3118 3119 Not collective 3120 3121 Input Parameters: 3122 + dm - The DM 3123 - lf - The local residual function 3124 3125 Calling sequence of lf: 3126 $ lf (SNES snes, Vec x, Vec f, void *ctx); 3127 3128 + snes - the SNES context 3129 . x - local vector with the state at which to evaluate residual 3130 . 
f - local vector to put residual in
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Simply record the callback; it is invoked later by the solver machinery */
  dm->lf = lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalJacobian"
/*@C
  DMGetLocalJacobian - Get the local Jacobian function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lj - The local Jacobian function

  Calling sequence of lj:
$ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx);

+ snes - the SNES context
. x - local vector with the state at which to evaluate residual
. J - matrix to put Jacobian in
. M - matrix to use for defining Jacobian preconditioner
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* NULL lj is tolerated: caller may probe without retrieving */
  if (lj) *lj = dm->lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetLocalJacobian"
/*@C
  DMSetLocalJacobian - Set the local Jacobian function for this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lj - The local Jacobian function

  Calling sequence of lj:
$ lj (SNES snes, Vec x, Mat J, Mat M, void *ctx);

+ snes - the SNES context
. x - local vector with the state at which to evaluate residual
. J - matrix to put Jacobian in
.
M - matrix to use for defining Jacobian preconditioner
- ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Simply record the callback; it is invoked later by the solver machinery */
  dm->lj = lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* May be NULL if no section has been set on this DM */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
3240 3241 Input Parameters: 3242 + dm - The DM 3243 - section - The PetscSection 3244 3245 Level: intermediate 3246 3247 Note: Any existing Section will be destroyed 3248 3249 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection() 3250 @*/ 3251 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) { 3252 PetscInt numFields; 3253 PetscInt f; 3254 PetscErrorCode ierr; 3255 3256 PetscFunctionBegin; 3257 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3258 ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr); 3259 ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr); 3260 dm->defaultSection = section; 3261 ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr); 3262 if (numFields) { 3263 ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr); 3264 for (f = 0; f < numFields; ++f) { 3265 const char *name; 3266 3267 ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr); 3268 ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr); 3269 } 3270 } 3271 PetscFunctionReturn(0); 3272 } 3273 3274 #undef __FUNCT__ 3275 #define __FUNCT__ "DMGetDefaultGlobalSection" 3276 /*@ 3277 DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM. 3278 3279 Collective on DM 3280 3281 Input Parameter: 3282 . dm - The DM 3283 3284 Output Parameter: 3285 . section - The PetscSection 3286 3287 Level: intermediate 3288 3289 Note: This gets a borrowed reference, so the user should not destroy this PetscSection. 

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section (and the value layout map) from the local
     section and the point SF on first request */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* The DM takes over the caller's reference to section */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
.
sf - The PetscSF 3344 3345 Level: intermediate 3346 3347 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3348 3349 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3350 @*/ 3351 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3352 PetscInt nroots; 3353 PetscErrorCode ierr; 3354 3355 PetscFunctionBegin; 3356 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3357 PetscValidPointer(sf, 2); 3358 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3359 if (nroots < 0) { 3360 PetscSection section, gSection; 3361 3362 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3363 if (section) { 3364 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3365 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3366 } else { 3367 *sf = PETSC_NULL; 3368 PetscFunctionReturn(0); 3369 } 3370 } 3371 *sf = dm->defaultSF; 3372 PetscFunctionReturn(0); 3373 } 3374 3375 #undef __FUNCT__ 3376 #define __FUNCT__ "DMSetDefaultSF" 3377 /*@ 3378 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3379 3380 Input Parameters: 3381 + dm - The DM 3382 - sf - The PetscSF 3383 3384 Level: intermediate 3385 3386 Note: Any previous SF is destroyed 3387 3388 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3389 @*/ 3390 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3391 PetscErrorCode ierr; 3392 3393 PetscFunctionBegin; 3394 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3395 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3396 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3397 dm->defaultSF = sf; 3398 PetscFunctionReturn(0); 3399 } 3400 3401 #undef __FUNCT__ 3402 #define __FUNCT__ "DMCreateDefaultSF" 3403 /*@C 3404 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3405 describing the data layout. 3406 3407 Input Parameters: 3408 + dm - The DM 3409 . 
localSection - PetscSection describing the local data layout 3410 - globalSection - PetscSection describing the global data layout 3411 3412 Level: intermediate 3413 3414 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3415 @*/ 3416 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3417 { 3418 MPI_Comm comm = ((PetscObject) dm)->comm; 3419 PetscLayout layout; 3420 const PetscInt *ranges; 3421 PetscInt *local; 3422 PetscSFNode *remote; 3423 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3424 PetscMPIInt size, rank; 3425 PetscErrorCode ierr; 3426 3427 PetscFunctionBegin; 3428 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3429 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3430 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3431 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3432 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3433 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3434 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3435 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3436 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3437 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3438 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3439 PetscInt gdof, gcdof; 3440 3441 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3442 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3443 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3444 } 3445 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3446 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3447 for (p = pStart, l = 0; p < pEnd; ++p) { 3448 const PetscInt *cind; 3449 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3450 3451 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3452 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3453 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3454 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3455 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3456 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3457 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3458 if (!gdof) continue; /* Censored point */ 3459 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3460 if (gsize != dof-cdof) { 3461 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof); 3462 cdof = 0; /* Ignore constraints */ 3463 } 3464 for (d = 0, c = 0; d < dof; ++d) { 3465 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3466 local[l+d-c] = off+d; 3467 } 3468 if (gdof < 0) { 3469 for(d = 0; d < gsize; ++d, ++l) { 3470 PetscInt offset = -(goff+1) + d, r; 3471 3472 ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr); 3473 if (r < 0) r = -(r+2); 3474 remote[l].rank = r; 3475 remote[l].index = offset - ranges[r]; 3476 } 3477 } else { 3478 for(d = 0; d < gsize; ++d, ++l) { 3479 remote[l].rank = rank; 3480 remote[l].index = goff+d - ranges[rank]; 3481 } 3482 } 3483 } 3484 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3485 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3486 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, 
PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3487 PetscFunctionReturn(0); 3488 } 3489 3490 #undef __FUNCT__ 3491 #define __FUNCT__ "DMGetPointSF" 3492 /*@ 3493 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3494 3495 Input Parameter: 3496 . dm - The DM 3497 3498 Output Parameter: 3499 . sf - The PetscSF 3500 3501 Level: intermediate 3502 3503 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3504 3505 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3506 @*/ 3507 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3508 PetscFunctionBegin; 3509 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3510 PetscValidPointer(sf, 2); 3511 *sf = dm->sf; 3512 PetscFunctionReturn(0); 3513 } 3514 3515 #undef __FUNCT__ 3516 #define __FUNCT__ "DMSetPointSF" 3517 /*@ 3518 DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM. 3519 3520 Input Parameters: 3521 + dm - The DM 3522 - sf - The PetscSF 3523 3524 Level: intermediate 3525 3526 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3527 @*/ 3528 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) { 3529 PetscErrorCode ierr; 3530 3531 PetscFunctionBegin; 3532 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3533 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1); 3534 ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr); 3535 ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr); 3536 dm->sf = sf; 3537 PetscFunctionReturn(0); 3538 } 3539 3540 #undef __FUNCT__ 3541 #define __FUNCT__ "DMGetNumFields" 3542 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3543 { 3544 PetscFunctionBegin; 3545 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3546 PetscValidPointer(numFields, 2); 3547 *numFields = dm->numFields; 3548 PetscFunctionReturn(0); 3549 } 3550 3551 #undef __FUNCT__ 3552 #define __FUNCT__ "DMSetNumFields" 3553 PetscErrorCode DMSetNumFields(DM dm, PetscInt 
numFields) 3554 { 3555 PetscInt f; 3556 PetscErrorCode ierr; 3557 3558 PetscFunctionBegin; 3559 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3560 for (f = 0; f < dm->numFields; ++f) { 3561 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3562 } 3563 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3564 dm->numFields = numFields; 3565 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3566 for (f = 0; f < dm->numFields; ++f) { 3567 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3568 } 3569 PetscFunctionReturn(0); 3570 } 3571 3572 #undef __FUNCT__ 3573 #define __FUNCT__ "DMGetField" 3574 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3575 { 3576 PetscFunctionBegin; 3577 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3578 PetscValidPointer(field, 2); 3579 if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()"); 3580 if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields); 3581 *field = dm->fields[f]; 3582 PetscFunctionReturn(0); 3583 } 3584 3585 #undef __FUNCT__ 3586 #define __FUNCT__ "DMSetCoordinates" 3587 /*@ 3588 DMSetCoordinates - Sets into the DM a global vector that holds the coordinates 3589 3590 Collective on DM 3591 3592 Input Parameters: 3593 + dm - the DM 3594 - c - coordinate vector 3595 3596 Note: 3597 The coordinates do include those for ghost points, which are in the local vector 3598 3599 Level: intermediate 3600 3601 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3602 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM() 3603 @*/ 3604 PetscErrorCode DMSetCoordinates(DM dm, Vec c) 3605 { 3606 PetscErrorCode ierr; 3607 3608 PetscFunctionBegin; 3609 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 
3610 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3611 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3612 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3613 dm->coordinates = c; 3614 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3615 PetscFunctionReturn(0); 3616 } 3617 3618 #undef __FUNCT__ 3619 #define __FUNCT__ "DMSetCoordinatesLocal" 3620 /*@ 3621 DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates 3622 3623 Collective on DM 3624 3625 Input Parameters: 3626 + dm - the DM 3627 - c - coordinate vector 3628 3629 Note: 3630 The coordinates of ghost points can be set using DMSetCoordinates() 3631 followed by DMGetCoordinatesLocal(). This is intended to enable the 3632 setting of ghost coordinates outside of the domain. 3633 3634 Level: intermediate 3635 3636 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3637 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM() 3638 @*/ 3639 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c) 3640 { 3641 PetscErrorCode ierr; 3642 3643 PetscFunctionBegin; 3644 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3645 PetscValidHeaderSpecific(c,VEC_CLASSID,2); 3646 ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr); 3647 ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr); 3648 dm->coordinatesLocal = c; 3649 ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr); 3650 PetscFunctionReturn(0); 3651 } 3652 3653 #undef __FUNCT__ 3654 #define __FUNCT__ "DMGetCoordinates" 3655 /*@ 3656 DMGetCoordinates - Gets a global vector with the coordinates associated with the DM. 3657 3658 Not Collective 3659 3660 Input Parameter: 3661 . dm - the DM 3662 3663 Output Parameter: 3664 . c - global coordinate vector 3665 3666 Note: 3667 This is a borrowed reference, so the user should NOT destroy this vector 3668 3669 Each process has only the local coordinates (does NOT have the ghost coordinates). 

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global coordinate vector from the cached local one
     via the coordinate DM's local-to-global scatter */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* May be NULL if no coordinates have been set in either form */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local coordinate vector from the cached global one
     via the coordinate DM's global-to-local scatter */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* May be NULL if no coordinates have been set in either form */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Created lazily by the implementation-provided callback and cached */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}