#include <petscsnes.h>
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/

PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;

#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* with static libraries the packages this object depends on must be registered explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* no type has been set yet, so every function-table entry starts out NULL */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap              = PETSC_NULL;
  v->ltogmapb             = PETSC_NULL;
  v->bs                   = 1;
  v->coloringtype         = IS_COLORING_GLOBAL;
  v->lf                   = PETSC_NULL;
  v->lj                   = PETSC_NULL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): the constant 10 must stay in sync with the limit checked in DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields            = 0;
  v->fields               = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically
 Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,const VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* release any previously stored type string before keeping a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,const MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* release any previously stored type string before keeping a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DMDA

   Input Parameters:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.
141 142 Level: advanced 143 144 .keywords: DMDA, set, options, prefix, database 145 146 .seealso: DMSetFromOptions() 147 @*/ 148 PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[]) 149 { 150 PetscErrorCode ierr; 151 152 PetscFunctionBegin; 153 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 154 ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr); 155 PetscFunctionReturn(0); 156 } 157 158 #undef __FUNCT__ 159 #define __FUNCT__ "DMDestroy" 160 /*@ 161 DMDestroy - Destroys a vector packer or DMDA. 162 163 Collective on DM 164 165 Input Parameter: 166 . dm - the DM object to destroy 167 168 Level: developer 169 170 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 171 172 @*/ 173 PetscErrorCode DMDestroy(DM *dm) 174 { 175 PetscInt i, cnt = 0, f; 176 DMNamedVecLink nlink,nnext; 177 PetscErrorCode ierr; 178 179 PetscFunctionBegin; 180 if (!*dm) PetscFunctionReturn(0); 181 PetscValidHeaderSpecific((*dm),DM_CLASSID,1); 182 183 /* count all the circular references of DM and its contained Vecs */ 184 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 185 if ((*dm)->localin[i]) {cnt++;} 186 if ((*dm)->globalin[i]) {cnt++;} 187 } 188 for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++; 189 if ((*dm)->x) { 190 PetscObject obj; 191 ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr); 192 if (obj == (PetscObject)*dm) cnt++; 193 } 194 195 if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);} 196 /* 197 Need this test because the dm references the vectors that 198 reference the dm, so destroying the dm calls destroy on the 199 vectors that cause another destroy on the dm 200 */ 201 if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0); 202 ((PetscObject) (*dm))->refct = 0; 203 for (i=0; i<DM_MAX_WORK_VECTORS; i++) { 204 if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with 
DMGetLocalVector()"); 205 ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr); 206 } 207 for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */ 208 nnext = nlink->next; 209 if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name); 210 ierr = PetscFree(nlink->name);CHKERRQ(ierr); 211 ierr = VecDestroy(&nlink->X);CHKERRQ(ierr); 212 ierr = PetscFree(nlink);CHKERRQ(ierr); 213 } 214 (*dm)->namedglobal = PETSC_NULL; 215 216 /* Destroy the list of hooks */ 217 { 218 DMCoarsenHookLink link,next; 219 for (link=(*dm)->coarsenhook; link; link=next) { 220 next = link->next; 221 ierr = PetscFree(link);CHKERRQ(ierr); 222 } 223 (*dm)->coarsenhook = PETSC_NULL; 224 } 225 { 226 DMRefineHookLink link,next; 227 for (link=(*dm)->refinehook; link; link=next) { 228 next = link->next; 229 ierr = PetscFree(link);CHKERRQ(ierr); 230 } 231 (*dm)->refinehook = PETSC_NULL; 232 } 233 /* Destroy the work arrays */ 234 { 235 DMWorkLink link,next; 236 if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out"); 237 for (link=(*dm)->workin; link; link=next) { 238 next = link->next; 239 ierr = PetscFree(link->mem);CHKERRQ(ierr); 240 ierr = PetscFree(link);CHKERRQ(ierr); 241 } 242 (*dm)->workin = PETSC_NULL; 243 } 244 245 if ((*dm)->ctx && (*dm)->ctxdestroy) { 246 ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr); 247 } 248 ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr); 249 ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr); 250 ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr); 251 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr); 252 ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr); 253 ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr); 254 ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr); 255 256 ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr); 257 ierr = 
PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr); 258 ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr); 259 ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr); 260 261 ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr); 262 ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr); 263 ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr); 264 265 for (f = 0; f < (*dm)->numFields; ++f) { 266 ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr); 267 } 268 ierr = PetscFree((*dm)->fields);CHKERRQ(ierr); 269 /* if memory was published with AMS then destroy it */ 270 ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr); 271 272 ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr); 273 /* We do not destroy (*dm)->data here so that we can reference count backend objects */ 274 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 275 PetscFunctionReturn(0); 276 } 277 278 #undef __FUNCT__ 279 #define __FUNCT__ "DMSetUp" 280 /*@ 281 DMSetUp - sets up the data structures inside a DM object 282 283 Collective on DM 284 285 Input Parameter: 286 . dm - the DM object to setup 287 288 Level: developer 289 290 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 291 292 @*/ 293 PetscErrorCode DMSetUp(DM dm) 294 { 295 PetscErrorCode ierr; 296 297 PetscFunctionBegin; 298 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 299 if (dm->setupcalled) PetscFunctionReturn(0); 300 if (dm->ops->setup) { 301 ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr); 302 } 303 dm->setupcalled = PETSC_TRUE; 304 PetscFunctionReturn(0); 305 } 306 307 #undef __FUNCT__ 308 #define __FUNCT__ "DMSetFromOptions" 309 /*@ 310 DMSetFromOptions - sets parameters in a DM from the options database 311 312 Collective on DM 313 314 Input Parameter: 315 . dm - the DM object to set options for 316 317 Options Database: 318 + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros 319 . 
-dm_vec_type <type> type of vector to create inside DM 320 . -dm_mat_type <type> type of matrix to create inside DM 321 - -dm_coloring_type <global or ghosted> 322 323 Level: developer 324 325 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 326 327 @*/ 328 PetscErrorCode DMSetFromOptions(DM dm) 329 { 330 PetscBool flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg; 331 PetscErrorCode ierr; 332 char typeName[256] = MATAIJ; 333 334 PetscFunctionBegin; 335 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 336 ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr); 337 ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr); 338 ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr); 339 ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr); 340 ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr); 341 ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr); 342 ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr); 343 if (flg) { 344 ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr); 345 } 346 ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr); 347 if (flg) { 348 ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr); 349 } 350 ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of 
Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr); 351 if (dm->ops->setfromoptions) { 352 ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr); 353 } 354 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 355 ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr); 356 ierr = PetscOptionsEnd();CHKERRQ(ierr); 357 if (flg1) { 358 ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); 359 } 360 if (flg2) { 361 PetscViewer viewer; 362 363 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 364 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 365 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr); 366 ierr = DMView(dm, viewer);CHKERRQ(ierr); 367 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 368 } 369 if (flg3) { 370 PetscViewer viewer; 371 372 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 373 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 374 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); 375 ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr); 376 ierr = DMView(dm, viewer);CHKERRQ(ierr); 377 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 378 } 379 if (flg4) { 380 PetscViewer viewer; 381 382 ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr); 383 ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr); 384 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr); 385 ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr); 386 ierr = DMView(dm, viewer);CHKERRQ(ierr); 387 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 388 } 389 PetscFunctionReturn(0); 390 } 391 392 #undef __FUNCT__ 393 #define __FUNCT__ "DMView" 394 /*@C 395 DMView - Views a vector packer or DMDA. 
396 397 Collective on DM 398 399 Input Parameter: 400 + dm - the DM object to view 401 - v - the viewer 402 403 Level: developer 404 405 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 406 407 @*/ 408 PetscErrorCode DMView(DM dm,PetscViewer v) 409 { 410 PetscErrorCode ierr; 411 412 PetscFunctionBegin; 413 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 414 if (!v) { 415 ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr); 416 } 417 if (dm->ops->view) { 418 ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr); 419 } 420 PetscFunctionReturn(0); 421 } 422 423 #undef __FUNCT__ 424 #define __FUNCT__ "DMCreateGlobalVector" 425 /*@ 426 DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object 427 428 Collective on DM 429 430 Input Parameter: 431 . dm - the DM object 432 433 Output Parameter: 434 . vec - the global vector 435 436 Level: beginner 437 438 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 439 440 @*/ 441 PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec) 442 { 443 PetscErrorCode ierr; 444 445 PetscFunctionBegin; 446 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 447 if (dm->defaultSection) { 448 PetscSection gSection; 449 PetscInt localSize; 450 451 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 452 ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr); 453 ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr); 454 ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr); 455 /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */ 456 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 457 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 458 /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */ 459 /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */ 460 /* ierr = 
VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */ 461 } else { 462 ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr); 463 } 464 PetscFunctionReturn(0); 465 } 466 467 #undef __FUNCT__ 468 #define __FUNCT__ "DMCreateLocalVector" 469 /*@ 470 DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object 471 472 Not Collective 473 474 Input Parameter: 475 . dm - the DM object 476 477 Output Parameter: 478 . vec - the local vector 479 480 Level: beginner 481 482 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix() 483 484 @*/ 485 PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec) 486 { 487 PetscErrorCode ierr; 488 489 PetscFunctionBegin; 490 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 491 if (dm->defaultSection) { 492 PetscInt localSize; 493 494 ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr); 495 ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr); 496 ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr); 497 ierr = VecSetFromOptions(*vec);CHKERRQ(ierr); 498 ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr); 499 } else { 500 ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr); 501 } 502 PetscFunctionReturn(0); 503 } 504 505 #undef __FUNCT__ 506 #define __FUNCT__ "DMGetLocalToGlobalMapping" 507 /*@ 508 DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM. 509 510 Collective on DM 511 512 Input Parameter: 513 . dm - the DM that provides the mapping 514 515 Output Parameter: 516 . ltog - the mapping 517 518 Level: intermediate 519 520 Notes: 521 This mapping can then be used by VecSetLocalToGlobalMapping() or 522 MatSetLocalToGlobalMapping(). 
523 524 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock() 525 @*/ 526 PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog) 527 { 528 PetscErrorCode ierr; 529 530 PetscFunctionBegin; 531 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 532 PetscValidPointer(ltog,2); 533 if (!dm->ltogmap) { 534 PetscSection section, sectionGlobal; 535 536 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 537 if (section) { 538 PetscInt *ltog; 539 PetscInt pStart, pEnd, size, p, l; 540 541 ierr = DMGetDefaultGlobalSection(dm, §ionGlobal);CHKERRQ(ierr); 542 ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr); 543 ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr); 544 ierr = PetscMalloc(size * sizeof(PetscInt), <og);CHKERRQ(ierr); /* We want the local+overlap size */ 545 for (p = pStart, l = 0; p < pEnd; ++p) { 546 PetscInt dof, off, c; 547 548 /* Should probably use constrained dofs */ 549 ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr); 550 ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr); 551 for (c = 0; c < dof; ++c, ++l) { 552 ltog[l] = off+c; 553 } 554 } 555 ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr); 556 ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr); 557 } else { 558 if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping"); 559 ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr); 560 } 561 } 562 *ltog = dm->ltogmap; 563 PetscFunctionReturn(0); 564 } 565 566 #undef __FUNCT__ 567 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock" 568 /*@ 569 DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM. 570 571 Collective on DM 572 573 Input Parameter: 574 . da - the distributed array that provides the mapping 575 576 Output Parameter: 577 . 
  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* with block size 1 the block mapping is the same object as the point mapping;
         take an extra reference since both dm->ltogmap and dm->ltogmapb now own it */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.
  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs starts at 1 in DMCreate(); a value < 1 means the implementation never set it */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMDAGetCoordinateDA()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.
  coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix"
/*@C
    DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
            any type which inherits from one of these (such as MATAIJ)

    Output Parameter:
.   mat - the empty Jacobian

    Level: beginner

    Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
       do not need to do it yourself.

       By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
       the nonzero pattern call DMDASetMatPreallocateOnly()

       For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
       internally by PETSc.

       For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
       the indices for the global numbering for DMDAs which is complicated.
766 767 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 768 769 @*/ 770 PetscErrorCode DMCreateMatrix(DM dm,const MatType mtype,Mat *mat) 771 { 772 PetscErrorCode ierr; 773 774 PetscFunctionBegin; 775 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 776 #ifndef PETSC_USE_DYNAMIC_LIBRARIES 777 ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr); 778 #endif 779 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 780 PetscValidPointer(mat,3); 781 if (dm->mattype) { 782 ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr); 783 } else { 784 ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr); 785 } 786 PetscFunctionReturn(0); 787 } 788 789 #undef __FUNCT__ 790 #define __FUNCT__ "DMSetMatrixPreallocateOnly" 791 /*@ 792 DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly 793 preallocated but the nonzero structure and zero values will not be set. 794 795 Logically Collective on DMDA 796 797 Input Parameter: 798 + dm - the DM 799 - only - PETSC_TRUE if only want preallocation 800 801 Level: developer 802 .seealso DMCreateMatrix() 803 @*/ 804 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only) 805 { 806 PetscFunctionBegin; 807 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 808 dm->prealloc_only = only; 809 PetscFunctionReturn(0); 810 } 811 812 #undef __FUNCT__ 813 #define __FUNCT__ "DMGetWorkArray" 814 /*@C 815 DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray() 816 817 Not Collective 818 819 Input Parameters: 820 + dm - the DM object 821 . count - The minium size 822 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) 823 824 Output Parameter: 825 . 
  array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* reuse a previously checked-in link if one exists, otherwise allocate a new one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* grow (never shrink) the buffer so it is at least count elements of dtype */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* move the link onto the checked-out list */
  link->next  = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
.
  array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* find the checked-out link owning this buffer and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetNullSpaceConstructor"
/* Registers a callback that builds the near-null space for the given field.
   NOTE(review): the limit 10 must stay in sync with the size of the
   nullspaceConstructors[] array initialized in DMCreate(). */
PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
  dm->nullspaceConstructors[field] = nullsp;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* initialize every requested output so callers see well-defined values on early exit */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* first pass: count the unconstrained dofs owned by this process for each field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) { /* gdof <= 0 marks points owned by another process */
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reset so the second pass can reuse it as a fill cursor */
    }
    /* second pass: record the global index of every unconstrained dof, field by field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f] */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldDecompositionDM"
/*@C
  DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.

  Not Collective

  Input Parameters:
+ dm   - the DM object
- name - the name of the field decomposition

  Output Parameter:
.
ddm - the field decomposition DM (PETSC_NULL, if no such decomposition is known) 1034 1035 Level: advanced 1036 1037 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1038 @*/ 1039 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm) 1040 { 1041 PetscErrorCode ierr; 1042 1043 PetscFunctionBegin; 1044 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1045 PetscValidCharPointer(name,2); 1046 PetscValidPointer(ddm,3); 1047 *ddm = PETSC_NULL; 1048 if (dm->ops->createfielddecompositiondm) { 1049 ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1050 } 1051 PetscFunctionReturn(0); 1052 } 1053 1054 #undef __FUNCT__ 1055 #define __FUNCT__ "DMCreateFieldDecomposition" 1056 /*@C 1057 DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems 1058 corresponding to different fields: each IS contains the global indices of the dofs of the 1059 corresponding field. The optional list of DMs define the DM for each subproblem. 1060 Generalizes DMCreateFieldIS(). 1061 1062 Not collective 1063 1064 Input Parameter: 1065 . dm - the DM object 1066 1067 Output Parameters: 1068 + len - The number of subproblems in the field decomposition (or PETSC_NULL if not requested) 1069 . namelist - The name for each field (or PETSC_NULL if not requested) 1070 . islist - The global indices for each field (or PETSC_NULL if not requested) 1071 - dmlist - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1072 1073 Level: intermediate 1074 1075 Notes: 1076 The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with 1077 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1078 and all of the arrays should be freed with PetscFree(). 
1079 1080 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1081 @*/ 1082 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist) 1083 { 1084 PetscErrorCode ierr; 1085 1086 PetscFunctionBegin; 1087 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1088 if (len) {PetscValidPointer(len,2); *len = 0;} 1089 if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;} 1090 if (islist) {PetscValidPointer(islist,4); *islist = 0;} 1091 if (dmlist) {PetscValidPointer(dmlist,5); *dmlist = 0;} 1092 if (!dm->ops->createfielddecomposition) { 1093 PetscSection section; 1094 PetscInt numFields, f; 1095 1096 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 1097 if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);} 1098 if (section && numFields && dm->ops->createsubdm) { 1099 *len = numFields; 1100 ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr); 1101 for (f = 0; f < numFields; ++f) { 1102 const char *fieldName; 1103 1104 ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr); 1105 ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr); 1106 ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr); 1107 } 1108 } else { 1109 ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr); 1110 /* By default there are no DMs associated with subproblems. */ 1111 if (dmlist) *dmlist = PETSC_NULL; 1112 } 1113 } 1114 else { 1115 ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr); 1116 } 1117 PetscFunctionReturn(0); 1118 } 1119 1120 #undef __FUNCT__ 1121 #define __FUNCT__ "DMCreateSubDM" 1122 /*@C 1123 DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in. 1124 The fields are defined by DMCreateFieldIS(). 
1125 1126 Not collective 1127 1128 Input Parameters: 1129 + dm - the DM object 1130 . numFields - number of fields in this subproblem 1131 - len - The number of subproblems in the decomposition (or PETSC_NULL if not requested) 1132 1133 Output Parameters: 1134 . is - The global indices for the subproblem 1135 - dm - The DM for the subproblem 1136 1137 Level: intermediate 1138 1139 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS() 1140 @*/ 1141 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm) 1142 { 1143 PetscErrorCode ierr; 1144 1145 PetscFunctionBegin; 1146 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1147 PetscValidPointer(fields,3); 1148 if (is) {PetscValidPointer(is,4);} 1149 if (subdm) {PetscValidPointer(subdm,5);} 1150 if (dm->ops->createsubdm) { 1151 ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr); 1152 } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); 1153 PetscFunctionReturn(0); 1154 } 1155 1156 #undef __FUNCT__ 1157 #define __FUNCT__ "DMCreateDomainDecompositionDM" 1158 /*@C 1159 DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains. 1160 1161 Not Collective 1162 1163 Input Parameters: 1164 + dm - the DM object 1165 - name - the name of the subdomain decomposition 1166 1167 Output Parameter: 1168 . 
ddm - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known) 1169 1170 Level: advanced 1171 1172 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM() 1173 @*/ 1174 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm) 1175 { 1176 PetscErrorCode ierr; 1177 1178 PetscFunctionBegin; 1179 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1180 PetscValidCharPointer(name,2); 1181 PetscValidPointer(ddm,3); 1182 *ddm = PETSC_NULL; 1183 if (dm->ops->createdomaindecompositiondm) { 1184 ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr); 1185 } 1186 PetscFunctionReturn(0); 1187 } 1188 1189 #undef __FUNCT__ 1190 #define __FUNCT__ "DMCreateDomainDecomposition" 1191 /*@C 1192 DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems 1193 corresponding to restrictions to pairs nested subdomains: each IS contains the global 1194 indices of the dofs of the corresponding subdomains. The inner subdomains conceptually 1195 define a nonoverlapping covering, while outer subdomains can overlap. 1196 The optional list of DMs define the DM for each subproblem. 1197 1198 Not collective 1199 1200 Input Parameter: 1201 . dm - the DM object 1202 1203 Output Parameters: 1204 + len - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested) 1205 . namelist - The name for each subdomain (or PETSC_NULL if not requested) 1206 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested) 1207 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested) 1208 - dmlist - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined) 1209 1210 Level: intermediate 1211 1212 Notes: 1213 The user is responsible for freeing all requested arrays. 
In particular, every entry of names should be freed with 1214 PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(), 1215 and all of the arrays should be freed with PetscFree(). 1216 1217 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition() 1218 @*/ 1219 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist) 1220 { 1221 PetscErrorCode ierr; 1222 1223 PetscFunctionBegin; 1224 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1225 if (len) {PetscValidPointer(len,2); *len = PETSC_NULL;} 1226 if (namelist) {PetscValidPointer(namelist,3); *namelist = PETSC_NULL;} 1227 if (innerislist) {PetscValidPointer(innerislist,4); *innerislist = PETSC_NULL;} 1228 if (outerislist) {PetscValidPointer(outerislist,5); *outerislist = PETSC_NULL;} 1229 if (dmlist) {PetscValidPointer(dmlist,6); *dmlist = PETSC_NULL;} 1230 if (dm->ops->createdomaindecomposition) { 1231 ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr); 1232 } 1233 PetscFunctionReturn(0); 1234 } 1235 1236 #undef __FUNCT__ 1237 #define __FUNCT__ "DMRefine" 1238 /*@ 1239 DMRefine - Refines a DM object 1240 1241 Collective on DM 1242 1243 Input Parameter: 1244 + dm - the DM object 1245 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1246 1247 Output Parameter: 1248 . 
dmf - the refined DM, or PETSC_NULL 1249 1250 Note: If no refinement was done, the return value is PETSC_NULL 1251 1252 Level: developer 1253 1254 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1255 @*/ 1256 PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf) 1257 { 1258 PetscErrorCode ierr; 1259 DMRefineHookLink link; 1260 1261 PetscFunctionBegin; 1262 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1263 ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr); 1264 if (*dmf) { 1265 (*dmf)->ops->creatematrix = dm->ops->creatematrix; 1266 (*dmf)->ops->initialguess = dm->ops->initialguess; 1267 (*dmf)->ops->function = dm->ops->function; 1268 (*dmf)->ops->functionj = dm->ops->functionj; 1269 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1270 (*dmf)->ops->jacobian = dm->ops->jacobian; 1271 } 1272 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr); 1273 (*dmf)->ctx = dm->ctx; 1274 (*dmf)->leveldown = dm->leveldown; 1275 (*dmf)->levelup = dm->levelup + 1; 1276 ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr); 1277 for (link=dm->refinehook; link; link=link->next) { 1278 if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);} 1279 } 1280 } 1281 PetscFunctionReturn(0); 1282 } 1283 1284 #undef __FUNCT__ 1285 #define __FUNCT__ "DMRefineHookAdd" 1286 /*@ 1287 DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid 1288 1289 Logically Collective 1290 1291 Input Arguments: 1292 + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level 1293 . refinehook - function to run when setting up a coarser level 1294 . 
interphook - function to run to update data on finer levels (once per SNESSolve()) 1295 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1296 1297 Calling sequence of refinehook: 1298 $ refinehook(DM coarse,DM fine,void *ctx); 1299 1300 + coarse - coarse level DM 1301 . fine - fine level DM to interpolate problem to 1302 - ctx - optional user-defined function context 1303 1304 Calling sequence for interphook: 1305 $ interphook(DM coarse,Mat interp,DM fine,void *ctx) 1306 1307 + coarse - coarse level DM 1308 . interp - matrix interpolating a coarse-level solution to the finer grid 1309 . fine - fine level DM to update 1310 - ctx - optional user-defined function context 1311 1312 Level: advanced 1313 1314 Notes: 1315 This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing 1316 1317 If this function is called multiple times, the hooks will be run in the order they are added. 1318 1319 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1320 @*/ 1321 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx) 1322 { 1323 PetscErrorCode ierr; 1324 DMRefineHookLink link,*p; 1325 1326 PetscFunctionBegin; 1327 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1328 for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1329 ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr); 1330 link->refinehook = refinehook; 1331 link->interphook = interphook; 1332 link->ctx = ctx; 1333 link->next = PETSC_NULL; 1334 *p = link; 1335 PetscFunctionReturn(0); 1336 } 1337 1338 #undef __FUNCT__ 1339 #define __FUNCT__ "DMInterpolate" 1340 /*@ 1341 DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd() 1342 1343 Collective if any 
hooks are 1344 1345 Input Arguments: 1346 + coarse - coarser DM to use as a base 1347 . restrct - interpolation matrix, apply using MatInterpolate() 1348 - fine - finer DM to update 1349 1350 Level: developer 1351 1352 .seealso: DMRefineHookAdd(), MatInterpolate() 1353 @*/ 1354 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine) 1355 { 1356 PetscErrorCode ierr; 1357 DMRefineHookLink link; 1358 1359 PetscFunctionBegin; 1360 for (link=fine->refinehook; link; link=link->next) { 1361 if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);} 1362 } 1363 PetscFunctionReturn(0); 1364 } 1365 1366 #undef __FUNCT__ 1367 #define __FUNCT__ "DMGetRefineLevel" 1368 /*@ 1369 DMGetRefineLevel - Get's the number of refinements that have generated this DM. 1370 1371 Not Collective 1372 1373 Input Parameter: 1374 . dm - the DM object 1375 1376 Output Parameter: 1377 . level - number of refinements 1378 1379 Level: developer 1380 1381 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1382 1383 @*/ 1384 PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level) 1385 { 1386 PetscFunctionBegin; 1387 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1388 *level = dm->levelup; 1389 PetscFunctionReturn(0); 1390 } 1391 1392 #undef __FUNCT__ 1393 #define __FUNCT__ "DMGlobalToLocalBegin" 1394 /*@ 1395 DMGlobalToLocalBegin - Begins updating local vectors from global vector 1396 1397 Neighbor-wise Collective on DM 1398 1399 Input Parameters: 1400 + dm - the DM object 1401 . g - the global vector 1402 . 
mode - INSERT_VALUES or ADD_VALUES 1403 - l - the local vector 1404 1405 1406 Level: beginner 1407 1408 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1409 1410 @*/ 1411 PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l) 1412 { 1413 PetscSF sf; 1414 PetscErrorCode ierr; 1415 1416 PetscFunctionBegin; 1417 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1418 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1419 if (sf) { 1420 PetscScalar *lArray, *gArray; 1421 1422 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1423 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1424 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1425 ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1426 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1427 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1428 } else { 1429 ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1430 } 1431 PetscFunctionReturn(0); 1432 } 1433 1434 #undef __FUNCT__ 1435 #define __FUNCT__ "DMGlobalToLocalEnd" 1436 /*@ 1437 DMGlobalToLocalEnd - Ends updating local vectors from global vector 1438 1439 Neighbor-wise Collective on DM 1440 1441 Input Parameters: 1442 + dm - the DM object 1443 . g - the global vector 1444 . 
mode - INSERT_VALUES or ADD_VALUES 1445 - l - the local vector 1446 1447 1448 Level: beginner 1449 1450 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin() 1451 1452 @*/ 1453 PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l) 1454 { 1455 PetscSF sf; 1456 PetscErrorCode ierr; 1457 PetscScalar *lArray, *gArray; 1458 1459 PetscFunctionBegin; 1460 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1461 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1462 if (sf) { 1463 if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1464 1465 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1466 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1467 ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr); 1468 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1469 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1470 } else { 1471 ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr); 1472 } 1473 PetscFunctionReturn(0); 1474 } 1475 1476 #undef __FUNCT__ 1477 #define __FUNCT__ "DMLocalToGlobalBegin" 1478 /*@ 1479 DMLocalToGlobalBegin - updates global vectors from local vectors 1480 1481 Neighbor-wise Collective on DM 1482 1483 Input Parameters: 1484 + dm - the DM object 1485 . l - the local vector 1486 . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that 1487 base point. 1488 - - the global vector 1489 1490 Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. 
If you would like to simply add the non-ghosted values in the local 1491 array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work 1492 global array to the final global array with VecAXPY(). 1493 1494 Level: beginner 1495 1496 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin() 1497 1498 @*/ 1499 PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g) 1500 { 1501 PetscSF sf; 1502 PetscErrorCode ierr; 1503 1504 PetscFunctionBegin; 1505 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1506 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1507 if (sf) { 1508 MPI_Op op; 1509 PetscScalar *lArray, *gArray; 1510 1511 switch(mode) { 1512 case INSERT_VALUES: 1513 case INSERT_ALL_VALUES: 1514 #if defined(PETSC_HAVE_MPI_REPLACE) 1515 op = MPI_REPLACE; break; 1516 #else 1517 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1518 #endif 1519 case ADD_VALUES: 1520 case ADD_ALL_VALUES: 1521 op = MPI_SUM; break; 1522 default: 1523 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1524 } 1525 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1526 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1527 ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1528 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1529 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1530 } else { 1531 ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1532 } 1533 PetscFunctionReturn(0); 1534 } 1535 1536 #undef __FUNCT__ 1537 #define __FUNCT__ "DMLocalToGlobalEnd" 1538 /*@ 1539 DMLocalToGlobalEnd - updates global vectors from local vectors 1540 1541 Neighbor-wise Collective on DM 1542 1543 Input Parameters: 1544 + dm - the DM object 1545 . l - the local vector 1546 . mode - INSERT_VALUES or ADD_VALUES 1547 - g - the global vector 1548 1549 1550 Level: beginner 1551 1552 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd() 1553 1554 @*/ 1555 PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g) 1556 { 1557 PetscSF sf; 1558 PetscErrorCode ierr; 1559 1560 PetscFunctionBegin; 1561 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1562 ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr); 1563 if (sf) { 1564 MPI_Op op; 1565 PetscScalar *lArray, *gArray; 1566 1567 switch(mode) { 1568 case INSERT_VALUES: 1569 case INSERT_ALL_VALUES: 1570 #if defined(PETSC_HAVE_MPI_REPLACE) 1571 op = MPI_REPLACE; break; 1572 #else 1573 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation"); 1574 #endif 1575 case ADD_VALUES: 1576 case ADD_ALL_VALUES: 1577 op = MPI_SUM; break; 1578 default: 1579 SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); 1580 } 1581 ierr = VecGetArray(l, &lArray);CHKERRQ(ierr); 1582 ierr = VecGetArray(g, &gArray);CHKERRQ(ierr); 1583 ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr); 1584 ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr); 1585 ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr); 1586 } else { 1587 ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? 
ADD_VALUES : mode),g);CHKERRQ(ierr); 1588 } 1589 PetscFunctionReturn(0); 1590 } 1591 1592 #undef __FUNCT__ 1593 #define __FUNCT__ "DMComputeJacobianDefault" 1594 /*@ 1595 DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided 1596 1597 Collective on DM 1598 1599 Input Parameter: 1600 + dm - the DM object 1601 . x - location to compute Jacobian at; may be ignored for linear problems 1602 . A - matrix that defines the operator for the linear solve 1603 - B - the matrix used to construct the preconditioner 1604 1605 Level: developer 1606 1607 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 1608 DMSetFunction() 1609 1610 @*/ 1611 PetscErrorCode DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag) 1612 { 1613 PetscErrorCode ierr; 1614 1615 PetscFunctionBegin; 1616 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1617 *stflag = SAME_NONZERO_PATTERN; 1618 ierr = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr); 1619 if (A != B) { 1620 ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1621 ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1622 } 1623 PetscFunctionReturn(0); 1624 } 1625 1626 #undef __FUNCT__ 1627 #define __FUNCT__ "DMCoarsen" 1628 /*@ 1629 DMCoarsen - Coarsens a DM object 1630 1631 Collective on DM 1632 1633 Input Parameter: 1634 + dm - the DM object 1635 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL) 1636 1637 Output Parameter: 1638 . 
dmc - the coarsened DM 1639 1640 Level: developer 1641 1642 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1643 1644 @*/ 1645 PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc) 1646 { 1647 PetscErrorCode ierr; 1648 DMCoarsenHookLink link; 1649 1650 PetscFunctionBegin; 1651 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1652 ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr); 1653 (*dmc)->ops->creatematrix = dm->ops->creatematrix; 1654 (*dmc)->ops->initialguess = dm->ops->initialguess; 1655 (*dmc)->ops->function = dm->ops->function; 1656 (*dmc)->ops->functionj = dm->ops->functionj; 1657 if (dm->ops->jacobian != DMComputeJacobianDefault) { 1658 (*dmc)->ops->jacobian = dm->ops->jacobian; 1659 } 1660 ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr); 1661 (*dmc)->ctx = dm->ctx; 1662 (*dmc)->levelup = dm->levelup; 1663 (*dmc)->leveldown = dm->leveldown + 1; 1664 ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr); 1665 for (link=dm->coarsenhook; link; link=link->next) { 1666 if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);} 1667 } 1668 PetscFunctionReturn(0); 1669 } 1670 1671 #undef __FUNCT__ 1672 #define __FUNCT__ "DMCoarsenHookAdd" 1673 /*@ 1674 DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid 1675 1676 Logically Collective 1677 1678 Input Arguments: 1679 + fine - nonlinear solver context on which to run a hook when restricting to a coarser level 1680 . coarsenhook - function to run when setting up a coarser level 1681 . restricthook - function to run to update data on coarser levels (once per SNESSolve()) 1682 - ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL) 1683 1684 Calling sequence of coarsenhook: 1685 $ coarsenhook(DM fine,DM coarse,void *ctx); 1686 1687 + fine - fine level DM 1688 . 
coarse - coarse level DM to restrict problem to 1689 - ctx - optional user-defined function context 1690 1691 Calling sequence for restricthook: 1692 $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx) 1693 1694 + fine - fine level DM 1695 . mrestrict - matrix restricting a fine-level solution to the coarse grid 1696 . rscale - scaling vector for restriction 1697 . inject - matrix restricting by injection 1698 . coarse - coarse level DM to update 1699 - ctx - optional user-defined function context 1700 1701 Level: advanced 1702 1703 Notes: 1704 This function is only needed if auxiliary data needs to be set up on coarse grids. 1705 1706 If this function is called multiple times, the hooks will be run in the order they are added. 1707 1708 In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to 1709 extract the finest level information from its context (instead of from the SNES). 1710 1711 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate() 1712 @*/ 1713 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx) 1714 { 1715 PetscErrorCode ierr; 1716 DMCoarsenHookLink link,*p; 1717 1718 PetscFunctionBegin; 1719 PetscValidHeaderSpecific(fine,DM_CLASSID,1); 1720 for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */ 1721 ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr); 1722 link->coarsenhook = coarsenhook; 1723 link->restricthook = restricthook; 1724 link->ctx = ctx; 1725 link->next = PETSC_NULL; 1726 *p = link; 1727 PetscFunctionReturn(0); 1728 } 1729 1730 #undef __FUNCT__ 1731 #define __FUNCT__ "DMRestrict" 1732 /*@ 1733 DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd() 1734 1735 Collective 
if any hooks are 1736 1737 Input Arguments: 1738 + fine - finer DM to use as a base 1739 . restrct - restriction matrix, apply using MatRestrict() 1740 . inject - injection matrix, also use MatRestrict() 1741 - coarse - coarer DM to update 1742 1743 Level: developer 1744 1745 .seealso: DMCoarsenHookAdd(), MatRestrict() 1746 @*/ 1747 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse) 1748 { 1749 PetscErrorCode ierr; 1750 DMCoarsenHookLink link; 1751 1752 PetscFunctionBegin; 1753 for (link=fine->coarsenhook; link; link=link->next) { 1754 if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);} 1755 } 1756 PetscFunctionReturn(0); 1757 } 1758 1759 #undef __FUNCT__ 1760 #define __FUNCT__ "DMGetCoarsenLevel" 1761 /*@ 1762 DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM. 1763 1764 Not Collective 1765 1766 Input Parameter: 1767 . dm - the DM object 1768 1769 Output Parameter: 1770 . level - number of coarsenings 1771 1772 Level: developer 1773 1774 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1775 1776 @*/ 1777 PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level) 1778 { 1779 PetscFunctionBegin; 1780 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1781 *level = dm->leveldown; 1782 PetscFunctionReturn(0); 1783 } 1784 1785 1786 1787 #undef __FUNCT__ 1788 #define __FUNCT__ "DMRefineHierarchy" 1789 /*@C 1790 DMRefineHierarchy - Refines a DM object, all levels at once 1791 1792 Collective on DM 1793 1794 Input Parameter: 1795 + dm - the DM object 1796 - nlevels - the number of levels of refinement 1797 1798 Output Parameter: 1799 . 
dmf - the refined DM hierarchy 1800 1801 Level: developer 1802 1803 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1804 1805 @*/ 1806 PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[]) 1807 { 1808 PetscErrorCode ierr; 1809 1810 PetscFunctionBegin; 1811 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1812 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1813 if (nlevels == 0) PetscFunctionReturn(0); 1814 if (dm->ops->refinehierarchy) { 1815 ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr); 1816 } else if (dm->ops->refine) { 1817 PetscInt i; 1818 1819 ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr); 1820 for (i=1; i<nlevels; i++) { 1821 ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr); 1822 } 1823 } else { 1824 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet"); 1825 } 1826 PetscFunctionReturn(0); 1827 } 1828 1829 #undef __FUNCT__ 1830 #define __FUNCT__ "DMCoarsenHierarchy" 1831 /*@C 1832 DMCoarsenHierarchy - Coarsens a DM object, all levels at once 1833 1834 Collective on DM 1835 1836 Input Parameter: 1837 + dm - the DM object 1838 - nlevels - the number of levels of coarsening 1839 1840 Output Parameter: 1841 . 
dmc - the coarsened DM hierarchy 1842 1843 Level: developer 1844 1845 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation() 1846 1847 @*/ 1848 PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[]) 1849 { 1850 PetscErrorCode ierr; 1851 1852 PetscFunctionBegin; 1853 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1854 if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative"); 1855 if (nlevels == 0) PetscFunctionReturn(0); 1856 PetscValidPointer(dmc,3); 1857 if (dm->ops->coarsenhierarchy) { 1858 ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr); 1859 } else if (dm->ops->coarsen) { 1860 PetscInt i; 1861 1862 ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr); 1863 for (i=1; i<nlevels; i++) { 1864 ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr); 1865 } 1866 } else { 1867 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet"); 1868 } 1869 PetscFunctionReturn(0); 1870 } 1871 1872 #undef __FUNCT__ 1873 #define __FUNCT__ "DMCreateAggregates" 1874 /*@ 1875 DMCreateAggregates - Gets the aggregates that map between 1876 grids associated with two DMs. 1877 1878 Collective on DM 1879 1880 Input Parameters: 1881 + dmc - the coarse grid DM 1882 - dmf - the fine grid DM 1883 1884 Output Parameters: 1885 . 
rest - the restriction matrix (transpose of the projection matrix) 1886 1887 Level: intermediate 1888 1889 .keywords: interpolation, restriction, multigrid 1890 1891 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation() 1892 @*/ 1893 PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest) 1894 { 1895 PetscErrorCode ierr; 1896 1897 PetscFunctionBegin; 1898 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 1899 PetscValidHeaderSpecific(dmf,DM_CLASSID,2); 1900 ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr); 1901 PetscFunctionReturn(0); 1902 } 1903 1904 #undef __FUNCT__ 1905 #define __FUNCT__ "DMSetApplicationContextDestroy" 1906 /*@C 1907 DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed 1908 1909 Not Collective 1910 1911 Input Parameters: 1912 + dm - the DM object 1913 - destroy - the destroy function 1914 1915 Level: intermediate 1916 1917 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1918 1919 @*/ 1920 PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**)) 1921 { 1922 PetscFunctionBegin; 1923 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1924 dm->ctxdestroy = destroy; 1925 PetscFunctionReturn(0); 1926 } 1927 1928 #undef __FUNCT__ 1929 #define __FUNCT__ "DMSetApplicationContext" 1930 /*@ 1931 DMSetApplicationContext - Set a user context into a DM object 1932 1933 Not Collective 1934 1935 Input Parameters: 1936 + dm - the DM object 1937 - ctx - the user context 1938 1939 Level: intermediate 1940 1941 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext() 1942 1943 @*/ 1944 PetscErrorCode DMSetApplicationContext(DM dm,void *ctx) 1945 { 1946 PetscFunctionBegin; 1947 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1948 dm->ctx = ctx; 1949 PetscFunctionReturn(0); 1950 } 1951 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really the address of the caller's pointer (void**) passed as void* */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
    DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the initial guess

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
    DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

    Level: intermediate

    Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite
differences if no Jacobian computer is provided with DMSetJacobian(). Canceling cancels the function,
    but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode  DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  if (f) {
    /* also record f for finite-difference Jacobian evaluation; a NULL f cancels
       the function but deliberately leaves functionj alone (see Notes above) */
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode  DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetVariableBounds"
/*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2062 2063 Logically Collective on DM 2064 2065 Input Parameter: 2066 + dm - the DM object 2067 - f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set) 2068 2069 Level: intermediate 2070 2071 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2072 DMSetJacobian() 2073 2074 @*/ 2075 PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec)) 2076 { 2077 PetscFunctionBegin; 2078 dm->ops->computevariablebounds = f; 2079 PetscFunctionReturn(0); 2080 } 2081 2082 #undef __FUNCT__ 2083 #define __FUNCT__ "DMHasVariableBounds" 2084 /*@ 2085 DMHasVariableBounds - does the DM object have a variable bounds function? 2086 2087 Not Collective 2088 2089 Input Parameter: 2090 . dm - the DM object to destroy 2091 2092 Output Parameter: 2093 . flg - PETSC_TRUE if the variable bounds function exists 2094 2095 Level: developer 2096 2097 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2098 2099 @*/ 2100 PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg) 2101 { 2102 PetscFunctionBegin; 2103 *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; 2104 PetscFunctionReturn(0); 2105 } 2106 2107 #undef __FUNCT__ 2108 #define __FUNCT__ "DMComputeVariableBounds" 2109 /*@C 2110 DMComputeVariableBounds - compute variable bounds used by SNESVI. 
2111 2112 Logically Collective on DM 2113 2114 Input Parameters: 2115 + dm - the DM object to destroy 2116 - x - current solution at which the bounds are computed 2117 2118 Output parameters: 2119 + xl - lower bound 2120 - xu - upper bound 2121 2122 Level: intermediate 2123 2124 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2125 DMSetFunction(), DMSetVariableBounds() 2126 2127 @*/ 2128 PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu) 2129 { 2130 PetscErrorCode ierr; 2131 PetscFunctionBegin; 2132 PetscValidHeaderSpecific(xl,VEC_CLASSID,2); 2133 PetscValidHeaderSpecific(xu,VEC_CLASSID,2); 2134 if (dm->ops->computevariablebounds) { 2135 ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr); 2136 } 2137 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds."); 2138 PetscFunctionReturn(0); 2139 } 2140 2141 #undef __FUNCT__ 2142 #define __FUNCT__ "DMComputeInitialGuess" 2143 /*@ 2144 DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers 2145 2146 Collective on DM 2147 2148 Input Parameter: 2149 + dm - the DM object to destroy 2150 - x - the vector to hold the initial guess values 2151 2152 Level: developer 2153 2154 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat() 2155 2156 @*/ 2157 PetscErrorCode DMComputeInitialGuess(DM dm,Vec x) 2158 { 2159 PetscErrorCode ierr; 2160 PetscFunctionBegin; 2161 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2162 if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()"); 2163 ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr); 2164 PetscFunctionReturn(0); 2165 } 2166 2167 #undef __FUNCT__ 2168 #define __FUNCT__ "DMHasInitialGuess" 2169 /*@ 2170 
DMHasInitialGuess - does the DM object have an initial guess function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasInitialGuess(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
    DMHasFunction - does the DM object have a function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasFunction(DM dm,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg = (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
    DMHasJacobian - does the DM object have a matrix function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.
flg - PETSC_TRUE if function exists 2231 2232 Level: developer 2233 2234 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian() 2235 2236 @*/ 2237 PetscErrorCode DMHasJacobian(DM dm,PetscBool *flg) 2238 { 2239 PetscFunctionBegin; 2240 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2241 *flg = (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE; 2242 PetscFunctionReturn(0); 2243 } 2244 2245 #undef __FUNCT__ 2246 #define __FUNCT__ "DMHasColoring" 2247 /*@ 2248 DMHasColoring - does the DM object have a method of providing a coloring? 2249 2250 Not Collective 2251 2252 Input Parameter: 2253 . dm - the DM object 2254 2255 Output Parameter: 2256 . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring(). 2257 2258 Level: developer 2259 2260 .seealso DMHasFunction(), DMCreateColoring() 2261 2262 @*/ 2263 PetscErrorCode DMHasColoring(DM dm,PetscBool *flg) 2264 { 2265 PetscFunctionBegin; 2266 *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; 2267 PetscFunctionReturn(0); 2268 } 2269 2270 #undef __FUNCT__ 2271 #define __FUNCT__ "DMSetVec" 2272 /*@C 2273 DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear. 2274 2275 Collective on DM 2276 2277 Input Parameter: 2278 + dm - the DM object 2279 - x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems. 
2280 2281 Level: developer 2282 2283 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2284 DMSetFunction(), DMSetJacobian(), DMSetVariableBounds() 2285 2286 @*/ 2287 PetscErrorCode DMSetVec(DM dm,Vec x) 2288 { 2289 PetscErrorCode ierr; 2290 PetscFunctionBegin; 2291 if (x) { 2292 if (!dm->x) { 2293 ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr); 2294 } 2295 ierr = VecCopy(x,dm->x);CHKERRQ(ierr); 2296 } 2297 else if (dm->x) { 2298 ierr = VecDestroy(&dm->x); CHKERRQ(ierr); 2299 } 2300 PetscFunctionReturn(0); 2301 } 2302 2303 2304 #undef __FUNCT__ 2305 #define __FUNCT__ "DMComputeFunction" 2306 /*@ 2307 DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES 2308 2309 Collective on DM 2310 2311 Input Parameter: 2312 + dm - the DM object to destroy 2313 . x - the location where the function is evaluationed, may be ignored for linear problems 2314 - b - the vector to hold the right hand side entries 2315 2316 Level: developer 2317 2318 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(), 2319 DMSetJacobian() 2320 2321 @*/ 2322 PetscErrorCode DMComputeFunction(DM dm,Vec x,Vec b) 2323 { 2324 PetscErrorCode ierr; 2325 PetscFunctionBegin; 2326 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2327 if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()"); 2328 PetscStackPush("DM user function"); 2329 ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr); 2330 PetscStackPop; 2331 PetscFunctionReturn(0); 2332 } 2333 2334 2335 2336 #undef __FUNCT__ 2337 #define __FUNCT__ "DMComputeJacobian" 2338 /*@ 2339 DMComputeJacobian - compute the matrix entries for the solver 2340 2341 Collective on DM 2342 2343 Input Parameter: 2344 + dm - the DM object 2345 . 
x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian routine: build a finite-difference Jacobian once, driven by a
       matrix coloring of B and the function stored in dm->ops->functionj, then install
       DMComputeJacobianDefault as the Jacobian routine for all subsequent calls. */
    ISColoring     coloring;
    MatFDColoring  fd;
    const MatType  mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x;  /* nonlinear problems may omit x and use the vector stored by DMSetVec()/below */
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}


/* registry of available DM implementations, filled by DMRegisterAll()/DMRegisterDynamic() */
PetscFList DMList              = PETSC_NULL;
PetscBool  DMRegisterAllCalled = PETSC_FALSE;

#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, const DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0); /* already this type; nothing to do */

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down the previous implementation before the new constructor runs */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode  DMGetType(DM dm, const DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.
  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, const DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, const DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* converter functions are registered as composed functions named DMConvert_<from>_<to>_C */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2) See if a specialized converter is known to the desired DM class. */
    /* a throwaway DM of the target type is created just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

  foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*--------------------------------------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Level: advanced
@*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char           fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* registry entries are stored as "path:name" so dynamic libraries can be located later */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2603 2604 Not Collective 2605 2606 Level: advanced 2607 2608 .keywords: DM, register, destroy 2609 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic() 2610 @*/ 2611 PetscErrorCode DMRegisterDestroy(void) 2612 { 2613 PetscErrorCode ierr; 2614 2615 PetscFunctionBegin; 2616 ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr); 2617 DMRegisterAllCalled = PETSC_FALSE; 2618 PetscFunctionReturn(0); 2619 } 2620 2621 #if defined(PETSC_HAVE_MATLAB_ENGINE) 2622 #include <mex.h> 2623 2624 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext; 2625 2626 #undef __FUNCT__ 2627 #define __FUNCT__ "DMComputeFunction_Matlab" 2628 /* 2629 DMComputeFunction_Matlab - Calls the function that has been set with 2630 DMSetFunctionMatlab(). 2631 2632 For linear problems x is null 2633 2634 .seealso: DMSetFunction(), DMGetFunction() 2635 */ 2636 PetscErrorCode DMComputeFunction_Matlab(DM dm,Vec x,Vec y) 2637 { 2638 PetscErrorCode ierr; 2639 DMMatlabContext *sctx; 2640 int nlhs = 1,nrhs = 4; 2641 mxArray *plhs[1],*prhs[4]; 2642 long long int lx = 0,ly = 0,ls = 0; 2643 2644 PetscFunctionBegin; 2645 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2646 PetscValidHeaderSpecific(y,VEC_CLASSID,3); 2647 PetscCheckSameComm(dm,1,y,3); 2648 2649 /* call Matlab function in ctx with arguments x and y */ 2650 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2651 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2652 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2653 ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr); 2654 prhs[0] = mxCreateDoubleScalar((double)ls); 2655 prhs[1] = mxCreateDoubleScalar((double)lx); 2656 prhs[2] = mxCreateDoubleScalar((double)ly); 2657 prhs[3] = mxCreateString(sctx->funcname); 2658 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr); 2659 ierr = mxGetScalar(plhs[0]);CHKERRQ(ierr); 2660 mxDestroyArray(prhs[0]); 2661 mxDestroyArray(prhs[1]); 2662 mxDestroyArray(prhs[2]); 2663 mxDestroyArray(prhs[3]); 
2664 mxDestroyArray(plhs[0]); 2665 PetscFunctionReturn(0); 2666 } 2667 2668 2669 #undef __FUNCT__ 2670 #define __FUNCT__ "DMSetFunctionMatlab" 2671 /* 2672 DMSetFunctionMatlab - Sets the function evaluation routine 2673 2674 */ 2675 PetscErrorCode DMSetFunctionMatlab(DM dm,const char *func) 2676 { 2677 PetscErrorCode ierr; 2678 DMMatlabContext *sctx; 2679 2680 PetscFunctionBegin; 2681 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2682 /* currently sctx is memory bleed */ 2683 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2684 if (!sctx) { 2685 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2686 } 2687 ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr); 2688 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2689 ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr); 2690 PetscFunctionReturn(0); 2691 } 2692 2693 #undef __FUNCT__ 2694 #define __FUNCT__ "DMComputeJacobian_Matlab" 2695 /* 2696 DMComputeJacobian_Matlab - Calls the function that has been set with 2697 DMSetJacobianMatlab(). 
2698 2699 For linear problems x is null 2700 2701 .seealso: DMSetFunction(), DMGetFunction() 2702 */ 2703 PetscErrorCode DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str) 2704 { 2705 PetscErrorCode ierr; 2706 DMMatlabContext *sctx; 2707 int nlhs = 2,nrhs = 5; 2708 mxArray *plhs[2],*prhs[5]; 2709 long long int lx = 0,lA = 0,lB = 0,ls = 0; 2710 2711 PetscFunctionBegin; 2712 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2713 PetscValidHeaderSpecific(A,MAT_CLASSID,3); 2714 2715 /* call MATLAB function in ctx with arguments x, A, and B */ 2716 ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2717 ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr); 2718 ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr); 2719 ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr); 2720 ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr); 2721 prhs[0] = mxCreateDoubleScalar((double)ls); 2722 prhs[1] = mxCreateDoubleScalar((double)lx); 2723 prhs[2] = mxCreateDoubleScalar((double)lA); 2724 prhs[3] = mxCreateDoubleScalar((double)lB); 2725 prhs[4] = mxCreateString(sctx->jacname); 2726 ierr = mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr); 2727 *str = (MatStructure) mxGetScalar(plhs[0]); 2728 ierr = (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr); 2729 mxDestroyArray(prhs[0]); 2730 mxDestroyArray(prhs[1]); 2731 mxDestroyArray(prhs[2]); 2732 mxDestroyArray(prhs[3]); 2733 mxDestroyArray(prhs[4]); 2734 mxDestroyArray(plhs[0]); 2735 mxDestroyArray(plhs[1]); 2736 PetscFunctionReturn(0); 2737 } 2738 2739 2740 #undef __FUNCT__ 2741 #define __FUNCT__ "DMSetJacobianMatlab" 2742 /* 2743 DMSetJacobianMatlab - Sets the Jacobian function evaluation routine 2744 2745 */ 2746 PetscErrorCode DMSetJacobianMatlab(DM dm,const char *func) 2747 { 2748 PetscErrorCode ierr; 2749 DMMatlabContext *sctx; 2750 2751 PetscFunctionBegin; 2752 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 2753 /* currently sctx is memory bleed */ 2754 ierr = 
DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr); 2755 if (!sctx) { 2756 ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr); 2757 } 2758 ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr); 2759 ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr); 2760 ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr); 2761 PetscFunctionReturn(0); 2762 } 2763 #endif 2764 2765 #undef __FUNCT__ 2766 #define __FUNCT__ "DMLoad" 2767 /*@C 2768 DMLoad - Loads a DM that has been stored in binary or HDF5 format 2769 with DMView(). 2770 2771 Collective on PetscViewer 2772 2773 Input Parameters: 2774 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or 2775 some related function before a call to DMLoad(). 2776 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or 2777 HDF5 file viewer, obtained from PetscViewerHDF5Open() 2778 2779 Level: intermediate 2780 2781 Notes: 2782 Defaults to the DM DA. 2783 2784 Notes for advanced users: 2785 Most users should not need to know the details of the binary storage 2786 format, since DMLoad() and DMView() completely hide these details. 2787 But for anyone who's interested, the standard binary matrix storage 2788 format is 2789 .vb 2790 has not yet been determined 2791 .ve 2792 2793 In addition, PETSc automatically does the byte swapping for 2794 machines that store the bytes reversed, e.g. DEC alpha, freebsd, 2795 linux, Windows and the paragon; thus if you write your own binary 2796 read/write routines you have to swap the bytes; see PetscBinaryRead() 2797 and PetscBinaryWrite() to see how this may be done. 
2798 2799 Concepts: vector^loading from file 2800 2801 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad() 2802 @*/ 2803 PetscErrorCode DMLoad(DM newdm, PetscViewer viewer) 2804 { 2805 PetscErrorCode ierr; 2806 2807 PetscFunctionBegin; 2808 PetscValidHeaderSpecific(newdm,DM_CLASSID,1); 2809 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 2810 2811 if (!((PetscObject)newdm)->type_name) { 2812 ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr); 2813 } 2814 ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr); 2815 PetscFunctionReturn(0); 2816 } 2817 2818 /******************************** FEM Support **********************************/ 2819 2820 #undef __FUNCT__ 2821 #define __FUNCT__ "DMPrintCellVector" 2822 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) { 2823 PetscInt f; 2824 PetscErrorCode ierr; 2825 2826 PetscFunctionBegin; 2827 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2828 for (f = 0; f < len; ++f) { 2829 ierr = PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr); 2830 } 2831 PetscFunctionReturn(0); 2832 } 2833 2834 #undef __FUNCT__ 2835 #define __FUNCT__ "DMPrintCellMatrix" 2836 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) { 2837 PetscInt f, g; 2838 PetscErrorCode ierr; 2839 2840 PetscFunctionBegin; 2841 ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr); 2842 for (f = 0; f < rows; ++f) { 2843 ierr = PetscPrintf(PETSC_COMM_SELF, " |");CHKERRQ(ierr); 2844 for (g = 0; g < cols; ++g) { 2845 ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr); 2846 } 2847 ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr); 2848 } 2849 PetscFunctionReturn(0); 2850 } 2851 2852 #undef __FUNCT__ 2853 #define __FUNCT__ "DMGetLocalFunction" 2854 /*@C 2855 DMGetLocalFunction - Get the local residual 
function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lf - The local residual function

  Calling sequence of lf:
$ lf(DM dm, Vec x, Vec f, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. f   - local vector to put residual in
- ctx - optional user-defined function context

  Level: intermediate

.seealso: DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* lf may be PETSC_NULL if the caller does not want the pointer */
  if (lf) *lf = dm->lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetLocalFunction"
/*@C
  DMSetLocalFunction - Set the local residual function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lf - The local residual function

  Calling sequence of lf:
$ lf(DM dm, Vec x, Vec f, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. f   - local vector to put residual in
- ctx - optional user-defined function context

  Level: intermediate

.seealso: DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  dm->lf = lf;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetLocalJacobian"
/*@C
  DMGetLocalJacobian - Get the local Jacobian function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lj - The local Jacobian function

  Calling sequence of lj:
$ lj(DM dm, Vec x, Mat J, Mat M, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. J   - matrix to put Jacobian in
. M   - matrix to use for defining Jacobian preconditioner
- ctx - optional user-defined function context

  Level: intermediate

.seealso: DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* lj may be PETSC_NULL if the caller does not want the pointer */
  if (lj) *lj = dm->lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetLocalJacobian"
/*@C
  DMSetLocalJacobian - Set the local Jacobian function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lj - The local Jacobian function

  Calling sequence of lj:
$ lj(DM dm, Vec x, Mat J, Mat M, void *ctx);

+ dm  - the DM context
. x   - local vector with the state at which to evaluate residual
. J   - matrix to put Jacobian in
. M   - matrix to use for defining Jacobian preconditioner
- ctx - optional user-defined function context

  Level: intermediate

.seealso: DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  dm->lj = lj;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
.
section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed; the DM takes ownership of section.

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Replacing the local section invalidates the cached global section as well */
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's fields onto the DM, copying over the field names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Created lazily from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed; the DM takes ownership of section.

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3115 3116 .seealso: DMSetDefaultSF(), DMCreateDefaultSF() 3117 @*/ 3118 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) { 3119 PetscInt nroots; 3120 PetscErrorCode ierr; 3121 3122 PetscFunctionBegin; 3123 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3124 PetscValidPointer(sf, 2); 3125 ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr); 3126 if (nroots < 0) { 3127 PetscSection section, gSection; 3128 3129 ierr = DMGetDefaultSection(dm, §ion);CHKERRQ(ierr); 3130 if (section) { 3131 ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr); 3132 ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr); 3133 } else { 3134 *sf = PETSC_NULL; 3135 PetscFunctionReturn(0); 3136 } 3137 } 3138 *sf = dm->defaultSF; 3139 PetscFunctionReturn(0); 3140 } 3141 3142 #undef __FUNCT__ 3143 #define __FUNCT__ "DMSetDefaultSF" 3144 /*@ 3145 DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM 3146 3147 Input Parameters: 3148 + dm - The DM 3149 - sf - The PetscSF 3150 3151 Level: intermediate 3152 3153 Note: Any previous SF is destroyed 3154 3155 .seealso: DMGetDefaultSF(), DMCreateDefaultSF() 3156 @*/ 3157 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) { 3158 PetscErrorCode ierr; 3159 3160 PetscFunctionBegin; 3161 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3162 PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2); 3163 ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr); 3164 dm->defaultSF = sf; 3165 PetscFunctionReturn(0); 3166 } 3167 3168 #undef __FUNCT__ 3169 #define __FUNCT__ "DMCreateDefaultSF" 3170 /*@C 3171 DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections 3172 describing the data layout. 3173 3174 Input Parameters: 3175 + dm - The DM 3176 . 
localSection - PetscSection describing the local data layout 3177 - globalSection - PetscSection describing the global data layout 3178 3179 Level: intermediate 3180 3181 .seealso: DMGetDefaultSF(), DMSetDefaultSF() 3182 @*/ 3183 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection) 3184 { 3185 MPI_Comm comm = ((PetscObject) dm)->comm; 3186 PetscLayout layout; 3187 const PetscInt *ranges; 3188 PetscInt *local; 3189 PetscSFNode *remote; 3190 PetscInt pStart, pEnd, p, nroots, nleaves, l; 3191 PetscMPIInt size, rank; 3192 PetscErrorCode ierr; 3193 3194 PetscFunctionBegin; 3195 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3196 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 3197 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 3198 ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr); 3199 ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr); 3200 ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr); 3201 ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr); 3202 ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr); 3203 ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr); 3204 ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr); 3205 for (p = pStart, nleaves = 0; p < pEnd; ++p) { 3206 PetscInt gdof, gcdof; 3207 3208 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3209 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3210 nleaves += gdof < 0 ? 
-(gdof+1)-gcdof : gdof-gcdof; 3211 } 3212 ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr); 3213 ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr); 3214 for (p = pStart, l = 0; p < pEnd; ++p) { 3215 PetscInt *cind; 3216 PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c; 3217 3218 ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr); 3219 ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr); 3220 ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr); 3221 ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr); 3222 ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr); 3223 ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr); 3224 ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr); 3225 if (!gdof) continue; /* Censored point */ 3226 gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof; 3227 if (gsize != dof-cdof) { 3228 if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", size, p, dof-cdof, dof); 3229 cdof = 0; /* Ignore constraints */ 3230 } 3231 for (d = 0, c = 0; d < dof; ++d) { 3232 if ((c < cdof) && (cind[c] == d)) {++c; continue;} 3233 local[l+d-c] = off+d; 3234 } 3235 if (gdof < 0) { 3236 for(d = 0; d < gsize; ++d, ++l) { 3237 PetscInt offset = -(goff+1) + d, r; 3238 3239 for (r = 0; r < size; ++r) { 3240 if ((offset >= ranges[r]) && (offset < ranges[r+1])) break; 3241 } 3242 remote[l].rank = r; 3243 remote[l].index = offset - ranges[r]; 3244 } 3245 } else { 3246 for(d = 0; d < gsize; ++d, ++l) { 3247 remote[l].rank = rank; 3248 remote[l].index = goff+d - ranges[rank]; 3249 } 3250 } 3251 } 3252 if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves); 3253 ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr); 3254 ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, 
local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr); 3255 PetscFunctionReturn(0); 3256 } 3257 3258 #undef __FUNCT__ 3259 #define __FUNCT__ "DMGetPointSF" 3260 /*@ 3261 DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM. 3262 3263 Input Parameter: 3264 . dm - The DM 3265 3266 Output Parameter: 3267 . sf - The PetscSF 3268 3269 Level: intermediate 3270 3271 Note: This gets a borrowed reference, so the user should not destroy this PetscSF. 3272 3273 .seealso: DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF() 3274 @*/ 3275 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) { 3276 PetscFunctionBegin; 3277 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3278 PetscValidPointer(sf, 2); 3279 *sf = dm->sf; 3280 PetscFunctionReturn(0); 3281 } 3282 3283 #undef __FUNCT__ 3284 #define __FUNCT__ "DMGetNumFields" 3285 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields) 3286 { 3287 PetscFunctionBegin; 3288 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3289 PetscValidPointer(numFields, 2); 3290 *numFields = dm->numFields; 3291 PetscFunctionReturn(0); 3292 } 3293 3294 #undef __FUNCT__ 3295 #define __FUNCT__ "DMSetNumFields" 3296 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields) 3297 { 3298 PetscInt f; 3299 PetscErrorCode ierr; 3300 3301 PetscFunctionBegin; 3302 PetscValidHeaderSpecific(dm, DM_CLASSID, 1); 3303 for (f = 0; f < dm->numFields; ++f) { 3304 ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr); 3305 } 3306 ierr = PetscFree(dm->fields);CHKERRQ(ierr); 3307 dm->numFields = numFields; 3308 ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr); 3309 for (f = 0; f < dm->numFields; ++f) { 3310 ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr); 3311 } 3312 PetscFunctionReturn(0); 3313 } 3314 3315 #undef __FUNCT__ 3316 #define __FUNCT__ "DMGetField" 3317 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field) 3318 { 3319 
PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* The DM shares a reference to c; any cached local coordinate vector is now stale */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* The DM shares a reference to c; any cached global coordinate vector is now stale */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Build the global coordinate vector lazily by gathering from the local vector */
  if (!dm->coordinates) {
    DM cdm;

    if (!dm->coordinatesLocal) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ORDER, "You must call DMSetCoordinates() or DMSetCoordinatesLocal() before this call");
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Build the ghosted local coordinate vector lazily by scattering from the global vector */
  if (!dm->coordinatesLocal) {
    DM cdm;

    if (!dm->coordinates) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ORDER, "You must call DMSetCoordinates() or DMSetCoordinatesLocal() before this call");
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
.
cdm - coordinate DM 3502 3503 Level: intermediate 3504 3505 .keywords: distributed array, get, corners, nodes, local indices, coordinates 3506 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal() 3507 @*/ 3508 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm) 3509 { 3510 PetscErrorCode ierr; 3511 3512 PetscFunctionBegin; 3513 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 3514 PetscValidPointer(cdm,2); 3515 if (!dm->coordinateDM) { 3516 if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM"); 3517 ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr); 3518 } 3519 *cdm = dm->coordinateDM; 3520 PetscFunctionReturn(0); 3521 } 3522