#define PETSCDM_DLL

#include "petscdm.h"     /*I "petscdm.h" I*/
#include "private/dmimpl.h"
#include "petscmat.h"

/*
   rstart is where an array/subvector starts in the global parallel vector, so array
   rstarts are meaningless (and set to the previous one) except on the processor where the array lives
*/

/* A packed component is either a redundant array (owned by exactly one rank) or a sub-DM */
typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType;

struct DMCompositeLink {
  DMCompositeLinkType    type;
  struct DMCompositeLink *next;          /* singly linked list of packed components */
  PetscInt               n;              /* number of owned entries on this process */
  PetscInt               rstart;         /* rstart is relative to this process */
  PetscInt               grstart;        /* grstart is relative to all processes */
  PetscInt               nlocal;         /* number of local entries (set from the local vector size for a DM link, from n for an array link) */

  /* only used for DMCOMPOSITE_DM */
  PetscInt               *grstarts;      /* global row for first unknown of this DM on each process */
  DM                     dm;

  /* only used for DMCOMPOSITE_ARRAY */
  PetscMPIInt            rank;           /* process where array unknowns live */
};

typedef struct {
  PetscInt               n,N,rstart;     /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */
  PetscInt               nghost;         /* number of all local entries including DMDA ghost points and any shared redundant arrays */
  PetscInt               nDM,nredundant,nmine; /* how many DM's and separate redundant arrays used to build DM (nmine is ones on this process) */
  PetscBool              setup;          /* after this is set, cannot add new links to the DM */
  struct DMCompositeLink *next;          /* head of the list of packed components */

  /* optional user callback that inserts the coupling nonzero locations; see DMCompositeSetCoupling() */
  PetscErrorCode         (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt);
} DM_Composite;

#undef __FUNCT__
#define __FUNCT__ "DMCompositeSetCoupling"
/*@C
    DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the
      separate components (DMDA's and arrays) in a DM to build the correct matrix nonzero structure.

    Logically Collective on MPI_Comm

    Input Parameter:
+   dm - the composite object
-   formcouplelocations - routine to set the nonzero locations in the matrix

    Level: advanced

    Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into
        this routine

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt))
{
  DM_Composite *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  com->FormCoupleLocations = FormCoupleLocations;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeSetContext"
/*@
    DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they
      set with DMCompositeSetCoupling()

    Not Collective

    Input Parameter:
+   dm - the composite object
-   ctx - the user supplied context

    Level: advanced

    Notes: Use DMCompositeGetContext() to retrieve the context when needed.

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetContext"
/*@
    DMCompositeGetContext - Access the context set with DMCompositeSetContext()

    Not Collective

    Input Parameter:
.   dm - the composite object

    Output Parameter:
.   ctx - the user supplied context

    Level: advanced

    Notes: Use DMCompositeGetContext() to retrieve the context when needed.
@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetContext(DM dm,void **ctx)
{
  PetscFunctionBegin;
  *ctx = dm->ctx;
  PetscFunctionReturn(0);
}


extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Composite"
/* Destroys the DMComposite: frees every link (destroying sub-DMs and their grstarts arrays),
   the DM_Composite data, and finally the header itself */
PetscErrorCode PETSCDM_DLLEXPORT DMDestroy_Composite(DM dm)
{
  PetscErrorCode         ierr;
  struct DMCompositeLink *next, *prev;
  PetscBool              done;
  DM_Composite           *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMDestroy_Private((DM)dm,&done);CHKERRQ(ierr);
  /* if the reference count has not reached zero there is nothing more to do */
  if (!done) PetscFunctionReturn(0);

  next = com->next;
  while (next) {
    prev = next;
    next = next->next;
    if (prev->type == DMCOMPOSITE_DM) {
      ierr = DMDestroy(prev->dm);CHKERRQ(ierr);
    }
    /* grstarts is only allocated for DM links (PetscNew zeroed it, so this test is safe for array links) */
    if (prev->grstarts) {
      ierr = PetscFree(prev->grstarts);CHKERRQ(ierr);
    }
    ierr = PetscFree(prev);CHKERRQ(ierr);
  }
  ierr = PetscFree(com);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Composite"
/* ASCII viewer: prints one line per link, recursing into DMView() for sub-DMs */
PetscErrorCode PETSCDM_DLLEXPORT DMView_Composite(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      iascii;
  DM_Composite   *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    struct DMCompositeLink *lnk = com->next;
    PetscInt               i;

    ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(v," contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
    for (i=0; lnk; lnk=lnk->next,i++) {
      if (lnk->dm) {
        ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
        ierr = DMView(lnk->dm,v);CHKERRQ(ierr);
        ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->nlocal,lnk->rank);CHKERRQ(ierr);
      }
    }
    ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* --------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Composite"
/* Computes the packed global layout: com->N, com->rstart, and the rstart/grstart(s) of
   every link.  After this runs no further links may be added. */
PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_Composite(DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               nprev = 0;
  PetscMPIInt            rank,size;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscLayout            map;

  PetscFunctionBegin;
  if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
  ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);

  /* now set the rstart for each linked array/vector */
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr);
  while (next) {
    next->rstart  = nprev;
    nprev        += next->n;
    next->grstart = com->rstart + next->rstart;
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* only the owning rank computed a meaningful grstart; broadcast it to everyone */
      ierr = MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    } else {
      ierr =
PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr);
      /* every process learns where this sub-DM starts on every rank */
      ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    }
    next = next->next;
  }
  com->setup = PETSC_TRUE;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_Array"
/* Sets *array to point at this link's slice of the packed global vector on the owning
   rank, and to 0 on all other ranks */
PetscErrorCode DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (array) {
    if (rank == mine->rank) {
      ierr   = VecGetArray(vec,&varray);CHKERRQ(ierr);
      *array = varray + mine->rstart;
      ierr   = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
    } else {
      *array = 0;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_DM"
/* Returns in *global a sub-DM global vector whose storage aliases this link's slice of
   the packed vector (via VecPlaceArray); undone by DMCompositeRestoreAccess_DM() */
PetscErrorCode DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (global) {
    ierr = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_Array"
/* Nothing to do: the array pointer handed out aliases the packed vector's own storage */
PetscErrorCode DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_DM"
/* Detaches the placed storage and returns the borrowed sub-DM global vector */
PetscErrorCode DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (global) {
    ierr = VecResetArray(*global);CHKERRQ(ierr);
    ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_Array"
/* Copies this link's slice of the packed vector into array on the owning rank, then
   broadcasts the values to every rank */
PetscErrorCode DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
  }
  ierr = MPI_Bcast(array,mine->nlocal,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_DM"
/* Scatters this link's slice of the packed vector into the sub-DM local vector by
   temporarily placing the slice as the sub-DM global vector and doing global-to-local */
PetscErrorCode DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_Array"
/* Gathers array values back into this link's slice of the packed vector on the owning
   rank, either replacing (INSERT_VALUES) or summing contributions from all ranks (ADD_VALUES) */
PetscErrorCode DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,const PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
  }
  switch (imode) {
  case INSERT_VALUES:
    /* only the owner's copy of the array is written into the packed vector */
    if (rank == mine->rank) {
      ierr = PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    }
    break;
  case ADD_VALUES: {
    PetscInt    i;
    void        *source;
    PetscScalar *buffer,*dest;
    if (rank == mine->rank) {
      dest = &varray[mine->rstart];
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      /* sum directly into the packed vector's storage */
      buffer = dest;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc(mine->nlocal*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = (void *) buffer;
#endif
      /* pre-add the owner's contribution; MPI_Reduce sums the other ranks' arrays on top */
      for (i=0; i<mine->nlocal; i++) buffer[i] = varray[mine->rstart+i] + array[i];
    } else {
      source = (void *) array;
      dest   = PETSC_NULL;
    }
    ierr = MPI_Reduce(source,dest,mine->nlocal,MPIU_SCALAR,MPI_SUM,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == mine->rank) {ierr = PetscFree(source);CHKERRQ(ierr);}
#endif
  } break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"imode");
  }
  if (rank == mine->rank) {ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_DM"
/* Gathers a sub-DM local vector back into this link's slice of the packed vector by
   placing the slice as the sub-DM global vector and doing local-to-global */
PetscErrorCode DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(mine->dm,local,imode,global);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(mine->dm,local,imode,global);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------*/

#include <stdarg.h>

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetNumberDM"
/*@C
    DMCompositeGetNumberDM - Gets the number of DM objects in the DMComposite
       representation.

    Not Collective

    Input Parameter:
.    dm - the packer object

    Output Parameter:
.     nDM - the number of DMs

    Level: beginner

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetNumberDM(DM dm,PetscInt *nDM)
{
  DM_Composite *com = (DM_Composite*)dm->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *nDM = com->nDM;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess"
/*@C
    DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them

    Level: advanced

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; the varargs must match the
     order in which components were added to the DMComposite */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess"
/*@C
    DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Level: advanced

.seealso DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeScatter(),
         DMCompositeRestoreAccess(), DMCompositeGetAccess()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter"
/*@C
    DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors), PETSC_NULL for those that are not needed

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeScatter(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscInt               cnt;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; cnt starts at 3 because the
     varargs are arguments 3,4,... of this call (used only in error messages) */
  va_start(Argp,gvec);
  for (cnt=3,next=com->next; next; cnt++,next=next->next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      if (array) PetscValidScalarPointer(array,cnt);
      PetscValidLogicalCollectiveBool(dm,!!array,cnt);
      if (!array) continue;
      ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      if (!vec) continue;
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather"
/*@C
    DMCompositeGather - Gathers into a global packed vector from its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ...
- the individual sequential objects (arrays or vectors), PETSC_NULL for any that are not needed

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGather(DM dm,Vec gvec,InsertMode imode,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;
  PetscInt               cnt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; varargs must match the order
     in which components were added (cnt is only used in error messages) */
  va_start(Argp,imode);
  for (cnt=3,next=com->next; next; cnt++,next=next->next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      if (!array) continue;
      PetscValidScalarPointer(array,cnt);
      ierr = DMCompositeGather_Array(dm,next,gvec,imode,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      if (!vec) continue;
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeGather_DM(dm,next,gvec,imode,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddArray"
/*@C
    DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
       be stored in part of the array on process orank.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    orank - the process on which the array entries officially live, this number must be
             the same on all processes.
-    n - the length of the array

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n)
{
  struct DMCompositeLink *mine,*next;
  PetscErrorCode         ierr;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  if (com->setup) {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
  }
#if defined(PETSC_USE_DEBUG)
  {
    /* verify all ranks agree on the owner; a mismatch would deadlock the later collectives */
    PetscMPIInt orankmax;
    ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr);
    if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
  }
#endif

  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  /* create new link; only the owner contributes n entries to the packed global size */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  mine->nlocal = n;
  mine->n      = (rank == orank) ? n : 0;
  mine->rank   = orank;
  mine->dm     = PETSC_NULL;
  mine->type   = DMCOMPOSITE_ARRAY;
  mine->next   = PETSC_NULL;
  if (rank == mine->rank) {com->n += n;com->nmine++;}

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nredundant++;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddDM"
/*@C
    DMCompositeAddDM - adds a DM vector to a DMComposite

    Collective on DMComposite

    Input Parameter:
+    dmc - the packer object
-    dm - the DM object, if the DM is a da you will need to caste it with a (DM)

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddDM(DM dmc,DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               n,nlocal;
  struct DMCompositeLink *mine,*next;
  Vec                    global,local;
  DM_Composite           *com = (DM_Composite*)dmc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  next = com->next;
  if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DM once you have used the DMComposite");

  /* create new link; the sizes are read off work vectors borrowed from the sub-DM */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = DMGetLocalVector(dm,&local);CHKERRQ(ierr);
  ierr = VecGetSize(local,&nlocal);CHKERRQ(ierr);
  ierr =
DMRestoreLocalVector(dm,&local);CHKERRQ(ierr); 757 mine->n = n; 758 mine->nlocal = nlocal; 759 mine->dm = dm; 760 mine->type = DMCOMPOSITE_DM; 761 mine->next = PETSC_NULL; 762 com->n += n; 763 764 /* add to end of list */ 765 if (!next) { 766 com->next = mine; 767 } else { 768 while (next->next) next = next->next; 769 next->next = mine; 770 } 771 com->nDM++; 772 com->nmine++; 773 PetscFunctionReturn(0); 774 } 775 776 extern PetscErrorCode PETSCDM_DLLEXPORT VecView_MPI(Vec,PetscViewer); 777 EXTERN_C_BEGIN 778 #undef __FUNCT__ 779 #define __FUNCT__ "VecView_DMComposite" 780 PetscErrorCode PETSCDM_DLLEXPORT VecView_DMComposite(Vec gvec,PetscViewer viewer) 781 { 782 DM dm; 783 PetscErrorCode ierr; 784 struct DMCompositeLink *next; 785 PetscBool isdraw; 786 DM_Composite *com; 787 788 PetscFunctionBegin; 789 ierr = PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr); 790 if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite"); 791 com = (DM_Composite*)dm->data; 792 next = com->next; 793 794 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 795 if (!isdraw) { 796 /* do I really want to call this? 
*/ 797 ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr); 798 } else { 799 PetscInt cnt = 0; 800 801 /* loop over packed objects, handling one at at time */ 802 while (next) { 803 if (next->type == DMCOMPOSITE_ARRAY) { 804 PetscScalar *array; 805 ierr = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr); 806 807 /*skip it for now */ 808 } else if (next->type == DMCOMPOSITE_DM) { 809 Vec vec; 810 PetscInt bs; 811 812 ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr); 813 ierr = VecView(vec,viewer);CHKERRQ(ierr); 814 ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr); 815 ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr); 816 ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr); 817 cnt += bs; 818 } else { 819 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 820 } 821 next = next->next; 822 } 823 ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr); 824 } 825 PetscFunctionReturn(0); 826 } 827 EXTERN_C_END 828 829 830 #undef __FUNCT__ 831 #define __FUNCT__ "DMCreateGlobalVector_Composite" 832 PetscErrorCode PETSCDM_DLLEXPORT DMCreateGlobalVector_Composite(DM dm,Vec *gvec) 833 { 834 PetscErrorCode ierr; 835 DM_Composite *com = (DM_Composite*)dm->data; 836 837 PetscFunctionBegin; 838 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 839 if (!com->setup) { 840 ierr = DMSetUp(dm);CHKERRQ(ierr); 841 } 842 ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr); 843 ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr); 844 ierr = VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr); 845 PetscFunctionReturn(0); 846 } 847 848 #undef __FUNCT__ 849 #define __FUNCT__ "DMCreateLocalVector_Composite" 850 PetscErrorCode PETSCDM_DLLEXPORT DMCreateLocalVector_Composite(DM dm,Vec *lvec) 851 { 852 PetscErrorCode ierr; 853 DM_Composite *com = (DM_Composite*)dm->data; 854 855 PetscFunctionBegin; 856 
PetscValidHeaderSpecific(dm,DM_CLASSID,1); 857 if (!com->setup) { 858 ierr = DMSetUp(dm);CHKERRQ(ierr); 859 } 860 ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr); 861 ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr); 862 PetscFunctionReturn(0); 863 } 864 865 #undef __FUNCT__ 866 #define __FUNCT__ "DMCompositeGetISLocalToGlobalMappings" 867 /*@C 868 DMCompositeGetISLocalToGlobalMappings - gets an ISLocalToGlobalMapping for each DM/array in the DMComposite, maps to the composite global space 869 870 Collective on DM 871 872 Input Parameter: 873 . dm - the packer object 874 875 Output Parameters: 876 . ltogs - the individual mappings for each packed vector/array. Note that this includes 877 all the ghost points that individual ghosted DMDA's may have. Also each process has an 878 mapping for EACH redundant array (not just the local redundant arrays). 879 880 Level: advanced 881 882 Notes: 883 Each entry of ltogs should be destroyed with ISLocalToGlobalMappingDestroy(), the ltogs array should be freed with PetscFree(). 
884 885 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 886 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(), 887 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries() 888 889 @*/ 890 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMapping **ltogs) 891 { 892 PetscErrorCode ierr; 893 PetscInt i,*idx,n,cnt; 894 struct DMCompositeLink *next; 895 PetscMPIInt rank; 896 DM_Composite *com = (DM_Composite*)dm->data; 897 898 PetscFunctionBegin; 899 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 900 ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(ISLocalToGlobalMapping),ltogs);CHKERRQ(ierr); 901 next = com->next; 902 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 903 904 /* loop over packed objects, handling one at at time */ 905 cnt = 0; 906 while (next) { 907 if (next->type == DMCOMPOSITE_ARRAY) { 908 ierr = PetscMalloc(next->nlocal*sizeof(PetscInt),&idx);CHKERRQ(ierr); 909 if (rank == next->rank) { 910 for (i=0; i<next->nlocal; i++) idx[i] = next->grstart + i; 911 } 912 ierr = MPI_Bcast(idx,next->nlocal,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 913 ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,next->nlocal,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr); 914 } else if (next->type == DMCOMPOSITE_DM) { 915 ISLocalToGlobalMapping ltog; 916 PetscMPIInt size; 917 const PetscInt *suboff,*indices; 918 Vec global; 919 920 /* Get sub-DM global indices for each local dof */ 921 ierr = DMGetLocalToGlobalMapping(next->dm,<og);CHKERRQ(ierr); 922 ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr); 923 ierr = ISLocalToGlobalMappingGetIndices(ltog,&indices);CHKERRQ(ierr); 924 ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr); 925 926 /* Get the offsets for the sub-DM global vector */ 927 ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr); 928 ierr = 
VecGetOwnershipRanges(global,&suboff);CHKERRQ(ierr); 929 ierr = MPI_Comm_size(((PetscObject)global)->comm,&size);CHKERRQ(ierr); 930 931 /* Shift the sub-DM definition of the global space to the composite global space */ 932 for (i=0; i<n; i++) { 933 PetscInt subi = indices[i],lo = 0,hi = size,t; 934 /* Binary search to find which rank owns subi */ 935 while (hi-lo > 1) { 936 t = lo + (hi-lo)/2; 937 if (suboff[t] > subi) hi = t; 938 else lo = t; 939 } 940 idx[i] = subi - suboff[lo] + next->grstarts[lo]; 941 } 942 ierr = ISLocalToGlobalMappingRestoreIndices(ltog,&indices);CHKERRQ(ierr); 943 ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr); 944 ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr); 945 } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 946 next = next->next; 947 cnt++; 948 } 949 PetscFunctionReturn(0); 950 } 951 952 #undef __FUNCT__ 953 #define __FUNCT__ "DMCompositeGetLocalISs" 954 /*@C 955 DMCompositeGetLocalISs - Gets index sets for each DM/array component of a composite local vector 956 957 Not Collective 958 959 Input Arguments: 960 . dm - composite DM 961 962 Output Arguments: 963 . is - array of serial index sets for each each component of the DMComposite 964 965 Level: intermediate 966 967 Notes: 968 At present, a composite local vector does not normally exist. This function is used to provide index sets for 969 MatGetLocalSubMatrix(). In the future, the scatters for each entry in the DMComposite may be be merged into a single 970 scatter to a composite local vector. 971 972 To get the composite global indices at all local points (including ghosts), use DMCompositeGetISLocalToGlobalMappings(). 973 974 To get index sets for pieces of the composite global vector, use DMCompositeGetGlobalISs(). 975 976 Each returned IS should be destroyed with ISDestroy(), the array should be freed with PetscFree(). 
977 978 .seealso: DMCompositeGetGlobalISs(), DMCompositeGetISLocalToGlobalMappings(), MatGetLocalSubMatrix(), MatCreateLocalRef() 979 @*/ 980 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalISs(DM dm,IS **is) 981 { 982 PetscErrorCode ierr; 983 DM_Composite *com = (DM_Composite*)dm->data; 984 struct DMCompositeLink *link; 985 PetscInt cnt,start; 986 987 PetscFunctionBegin; 988 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 989 PetscValidPointer(is,2); 990 ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr); 991 for (cnt=0,start=0,link=com->next; link; start+=link->nlocal,cnt++,link=link->next) { 992 ierr = ISCreateStride(PETSC_COMM_SELF,link->nlocal,start,1,&(*is)[cnt]);CHKERRQ(ierr); 993 if (link->type == DMCOMPOSITE_DM) { 994 PetscInt bs; 995 ierr = DMGetBlockSize(link->dm,&bs);CHKERRQ(ierr); 996 ierr = ISSetBlockSize((*is)[cnt],bs);CHKERRQ(ierr); 997 } 998 } 999 PetscFunctionReturn(0); 1000 } 1001 1002 #undef __FUNCT__ 1003 #define __FUNCT__ "DMCompositeGetGlobalISs" 1004 /*@C 1005 DMCompositeGetGlobalISs - Gets the index sets for each composed object 1006 1007 Collective on DMComposite 1008 1009 Input Parameter: 1010 . dm - the packer object 1011 1012 Output Parameters: 1013 . is - the array of index sets 1014 1015 Level: advanced 1016 1017 Notes: 1018 The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree() 1019 1020 These could be used to extract a subset of vector entries for a "multi-physics" preconditioner 1021 1022 Use DMCompositeGetLocalISs() for index sets in the packed local numbering, and 1023 DMCompositeGetISLocalToGlobalMappings() for to map local sub-DM (including ghost) indices to packed global 1024 indices. 
1025 1026 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1027 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(), 1028 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries() 1029 1030 @*/ 1031 1032 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalISs(DM dm,IS *is[]) 1033 { 1034 PetscErrorCode ierr; 1035 PetscInt cnt = 0,*idx,i; 1036 struct DMCompositeLink *next; 1037 PetscMPIInt rank; 1038 DM_Composite *com = (DM_Composite*)dm->data; 1039 1040 PetscFunctionBegin; 1041 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1042 ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(IS),is);CHKERRQ(ierr); 1043 next = com->next; 1044 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1045 1046 /* loop over packed objects, handling one at at time */ 1047 while (next) { 1048 ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1049 for (i=0; i<next->n; i++) idx[i] = next->grstart + i; 1050 ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr); 1051 cnt++; 1052 next = next->next; 1053 } 1054 PetscFunctionReturn(0); 1055 } 1056 1057 /* -------------------------------------------------------------------------------------*/ 1058 #undef __FUNCT__ 1059 #define __FUNCT__ "DMCompositeGetLocalVectors_Array" 1060 PetscErrorCode DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array) 1061 { 1062 PetscErrorCode ierr; 1063 PetscFunctionBegin; 1064 if (array) { 1065 ierr = PetscMalloc(mine->nlocal*sizeof(PetscScalar),array);CHKERRQ(ierr); 1066 } 1067 PetscFunctionReturn(0); 1068 } 1069 1070 #undef __FUNCT__ 1071 #define __FUNCT__ "DMCompositeGetLocalVectors_DM" 1072 PetscErrorCode DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local) 1073 { 1074 PetscErrorCode ierr; 1075 PetscFunctionBegin; 1076 if (local) { 1077 ierr = 
DMGetLocalVector(mine->dm,local);CHKERRQ(ierr); 1078 } 1079 PetscFunctionReturn(0); 1080 } 1081 1082 #undef __FUNCT__ 1083 #define __FUNCT__ "DMCompositeRestoreLocalVectors_Array" 1084 PetscErrorCode DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array) 1085 { 1086 PetscErrorCode ierr; 1087 PetscFunctionBegin; 1088 if (array) { 1089 ierr = PetscFree(*array);CHKERRQ(ierr); 1090 } 1091 PetscFunctionReturn(0); 1092 } 1093 1094 #undef __FUNCT__ 1095 #define __FUNCT__ "DMCompositeRestoreLocalVectors_DM" 1096 PetscErrorCode DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local) 1097 { 1098 PetscErrorCode ierr; 1099 PetscFunctionBegin; 1100 if (local) { 1101 ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr); 1102 } 1103 PetscFunctionReturn(0); 1104 } 1105 1106 #undef __FUNCT__ 1107 #define __FUNCT__ "DMCompositeGetLocalVectors" 1108 /*@C 1109 DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite. 1110 Use DMCompositeRestoreLocalVectors() to return them. 1111 1112 Not Collective 1113 1114 Input Parameter: 1115 . dm - the packer object 1116 1117 Output Parameter: 1118 . ... - the individual sequential objects (arrays or vectors) 1119 1120 Level: advanced 1121 1122 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1123 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1124 DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries() 1125 1126 @*/ 1127 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalVectors(DM dm,...) 
1128 { 1129 va_list Argp; 1130 PetscErrorCode ierr; 1131 struct DMCompositeLink *next; 1132 DM_Composite *com = (DM_Composite*)dm->data; 1133 1134 PetscFunctionBegin; 1135 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1136 next = com->next; 1137 /* loop over packed objects, handling one at at time */ 1138 va_start(Argp,dm); 1139 while (next) { 1140 if (next->type == DMCOMPOSITE_ARRAY) { 1141 PetscScalar **array; 1142 array = va_arg(Argp, PetscScalar**); 1143 ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr); 1144 } else if (next->type == DMCOMPOSITE_DM) { 1145 Vec *vec; 1146 vec = va_arg(Argp, Vec*); 1147 ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr); 1148 } else { 1149 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1150 } 1151 next = next->next; 1152 } 1153 va_end(Argp); 1154 PetscFunctionReturn(0); 1155 } 1156 1157 #undef __FUNCT__ 1158 #define __FUNCT__ "DMCompositeRestoreLocalVectors" 1159 /*@C 1160 DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite. 1161 1162 Not Collective 1163 1164 Input Parameter: 1165 . dm - the packer object 1166 1167 Output Parameter: 1168 . ... - the individual sequential objects (arrays or vectors) 1169 1170 Level: advanced 1171 1172 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1173 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1174 DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries() 1175 1176 @*/ 1177 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreLocalVectors(DM dm,...) 
1178 { 1179 va_list Argp; 1180 PetscErrorCode ierr; 1181 struct DMCompositeLink *next; 1182 DM_Composite *com = (DM_Composite*)dm->data; 1183 1184 PetscFunctionBegin; 1185 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1186 next = com->next; 1187 /* loop over packed objects, handling one at at time */ 1188 va_start(Argp,dm); 1189 while (next) { 1190 if (next->type == DMCOMPOSITE_ARRAY) { 1191 PetscScalar **array; 1192 array = va_arg(Argp, PetscScalar**); 1193 ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr); 1194 } else if (next->type == DMCOMPOSITE_DM) { 1195 Vec *vec; 1196 vec = va_arg(Argp, Vec*); 1197 ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr); 1198 } else { 1199 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1200 } 1201 next = next->next; 1202 } 1203 va_end(Argp); 1204 PetscFunctionReturn(0); 1205 } 1206 1207 /* -------------------------------------------------------------------------------------*/ 1208 #undef __FUNCT__ 1209 #define __FUNCT__ "DMCompositeGetEntries_Array" 1210 PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n) 1211 { 1212 PetscFunctionBegin; 1213 if (n) *n = mine->nlocal; 1214 PetscFunctionReturn(0); 1215 } 1216 1217 #undef __FUNCT__ 1218 #define __FUNCT__ "DMCompositeGetEntries_DM" 1219 PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm) 1220 { 1221 PetscFunctionBegin; 1222 if (dm) *dm = mine->dm; 1223 PetscFunctionReturn(0); 1224 } 1225 1226 #undef __FUNCT__ 1227 #define __FUNCT__ "DMCompositeGetEntries" 1228 /*@C 1229 DMCompositeGetEntries - Gets the DM, redundant size, etc for each entry in a DMComposite. 1230 1231 Not Collective 1232 1233 Input Parameter: 1234 . dm - the packer object 1235 1236 Output Parameter: 1237 . ... 
- the individual entries, DMs or integer sizes) 1238 1239 Level: advanced 1240 1241 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1242 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1243 DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(), DMCompositeScatter(), 1244 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors() 1245 1246 @*/ 1247 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetEntries(DM dm,...) 1248 { 1249 va_list Argp; 1250 PetscErrorCode ierr; 1251 struct DMCompositeLink *next; 1252 DM_Composite *com = (DM_Composite*)dm->data; 1253 1254 PetscFunctionBegin; 1255 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1256 next = com->next; 1257 /* loop over packed objects, handling one at at time */ 1258 va_start(Argp,dm); 1259 while (next) { 1260 if (next->type == DMCOMPOSITE_ARRAY) { 1261 PetscInt *n; 1262 n = va_arg(Argp, PetscInt*); 1263 ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr); 1264 } else if (next->type == DMCOMPOSITE_DM) { 1265 DM *dmn; 1266 dmn = va_arg(Argp, DM*); 1267 ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr); 1268 } else { 1269 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1270 } 1271 next = next->next; 1272 } 1273 va_end(Argp); 1274 PetscFunctionReturn(0); 1275 } 1276 1277 #undef __FUNCT__ 1278 #define __FUNCT__ "DMRefine_Composite" 1279 PetscErrorCode PETSCDM_DLLEXPORT DMRefine_Composite(DM dmi,MPI_Comm comm,DM *fine) 1280 { 1281 PetscErrorCode ierr; 1282 struct DMCompositeLink *next; 1283 DM_Composite *com = (DM_Composite*)dmi->data; 1284 DM dm; 1285 1286 PetscFunctionBegin; 1287 PetscValidHeaderSpecific(dmi,DM_CLASSID,1); 1288 next = com->next; 1289 ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr); 1290 1291 /* loop over packed objects, handling one at at time */ 1292 while (next) { 1293 if (next->type == DMCOMPOSITE_ARRAY) { 1294 ierr = 
DMCompositeAddArray(*fine,next->rank,next->nlocal);CHKERRQ(ierr); 1295 } else if (next->type == DMCOMPOSITE_DM) { 1296 ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr); 1297 ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr); 1298 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 1299 } else { 1300 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1301 } 1302 next = next->next; 1303 } 1304 PetscFunctionReturn(0); 1305 } 1306 1307 #include "petscmat.h" 1308 1309 struct MatPackLink { 1310 Mat A; 1311 struct MatPackLink *next; 1312 }; 1313 1314 struct MatPack { 1315 DM right,left; 1316 struct MatPackLink *next; 1317 }; 1318 1319 #undef __FUNCT__ 1320 #define __FUNCT__ "MatMultBoth_Shell_Pack" 1321 PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool add) 1322 { 1323 struct MatPack *mpack; 1324 struct DMCompositeLink *xnext,*ynext; 1325 struct MatPackLink *anext; 1326 PetscScalar *xarray,*yarray; 1327 PetscErrorCode ierr; 1328 PetscInt i; 1329 Vec xglobal,yglobal; 1330 PetscMPIInt rank; 1331 DM_Composite *comright; 1332 DM_Composite *comleft; 1333 1334 PetscFunctionBegin; 1335 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1336 ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr); 1337 comright = (DM_Composite*)mpack->right->data; 1338 comleft = (DM_Composite*)mpack->left->data; 1339 xnext = comright->next; 1340 ynext = comleft->next; 1341 anext = mpack->next; 1342 1343 while (xnext) { 1344 if (xnext->type == DMCOMPOSITE_ARRAY) { 1345 if (rank == xnext->rank) { 1346 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1347 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1348 if (add) { 1349 for (i=0; i<xnext->n; i++) { 1350 yarray[ynext->rstart+i] += xarray[xnext->rstart+i]; 1351 } 1352 } else { 1353 ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr); 1354 } 1355 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1356 ierr = 
VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1357 } 1358 } else if (xnext->type == DMCOMPOSITE_DM) { 1359 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1360 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1361 ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1362 ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1363 ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr); 1364 ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr); 1365 if (add) { 1366 ierr = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr); 1367 } else { 1368 ierr = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr); 1369 } 1370 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1371 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1372 ierr = VecResetArray(xglobal);CHKERRQ(ierr); 1373 ierr = VecResetArray(yglobal);CHKERRQ(ierr); 1374 ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1375 ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1376 anext = anext->next; 1377 } else { 1378 SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1379 } 1380 xnext = xnext->next; 1381 ynext = ynext->next; 1382 } 1383 PetscFunctionReturn(0); 1384 } 1385 1386 #undef __FUNCT__ 1387 #define __FUNCT__ "MatMultAdd_Shell_Pack" 1388 PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z) 1389 { 1390 PetscErrorCode ierr; 1391 PetscFunctionBegin; 1392 if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only"); 1393 ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr); 1394 PetscFunctionReturn(0); 1395 } 1396 1397 #undef __FUNCT__ 1398 #define __FUNCT__ "MatMult_Shell_Pack" 1399 PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y) 1400 { 1401 PetscErrorCode ierr; 1402 PetscFunctionBegin; 1403 ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr); 1404 PetscFunctionReturn(0); 1405 } 1406 1407 #undef __FUNCT__ 1408 #define __FUNCT__ "MatMultTranspose_Shell_Pack" 1409 PetscErrorCode 
MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y) 1410 { 1411 struct MatPack *mpack; 1412 struct DMCompositeLink *xnext,*ynext; 1413 struct MatPackLink *anext; 1414 PetscScalar *xarray,*yarray; 1415 PetscErrorCode ierr; 1416 Vec xglobal,yglobal; 1417 PetscMPIInt rank; 1418 DM_Composite *comright; 1419 DM_Composite *comleft; 1420 1421 PetscFunctionBegin; 1422 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1423 ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr); 1424 comright = (DM_Composite*)mpack->right->data; 1425 comleft = (DM_Composite*)mpack->left->data; 1426 ynext = comright->next; 1427 xnext = comleft->next; 1428 anext = mpack->next; 1429 1430 while (xnext) { 1431 if (xnext->type == DMCOMPOSITE_ARRAY) { 1432 if (rank == ynext->rank) { 1433 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1434 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1435 ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr); 1436 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1437 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1438 } 1439 } else if (xnext->type == DMCOMPOSITE_DM) { 1440 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1441 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1442 ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1443 ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1444 ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr); 1445 ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr); 1446 ierr = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr); 1447 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1448 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1449 ierr = VecResetArray(xglobal);CHKERRQ(ierr); 1450 ierr = VecResetArray(yglobal);CHKERRQ(ierr); 1451 ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1452 ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1453 anext = anext->next; 1454 } else { 1455 
SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1456 } 1457 xnext = xnext->next; 1458 ynext = ynext->next; 1459 } 1460 PetscFunctionReturn(0); 1461 } 1462 1463 #undef __FUNCT__ 1464 #define __FUNCT__ "MatDestroy_Shell_Pack" 1465 PetscErrorCode MatDestroy_Shell_Pack(Mat A) 1466 { 1467 struct MatPack *mpack; 1468 struct MatPackLink *anext,*oldanext; 1469 PetscErrorCode ierr; 1470 1471 PetscFunctionBegin; 1472 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1473 anext = mpack->next; 1474 1475 while (anext) { 1476 ierr = MatDestroy(anext->A);CHKERRQ(ierr); 1477 oldanext = anext; 1478 anext = anext->next; 1479 ierr = PetscFree(oldanext);CHKERRQ(ierr); 1480 } 1481 ierr = PetscFree(mpack);CHKERRQ(ierr); 1482 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 1483 PetscFunctionReturn(0); 1484 } 1485 1486 #undef __FUNCT__ 1487 #define __FUNCT__ "DMGetInterpolation_Composite" 1488 PetscErrorCode PETSCDM_DLLEXPORT DMGetInterpolation_Composite(DM coarse,DM fine,Mat *A,Vec *v) 1489 { 1490 PetscErrorCode ierr; 1491 PetscInt m,n,M,N; 1492 struct DMCompositeLink *nextc; 1493 struct DMCompositeLink *nextf; 1494 struct MatPackLink *nextmat,*pnextmat = 0; 1495 struct MatPack *mpack; 1496 Vec gcoarse,gfine; 1497 DM_Composite *comcoarse = (DM_Composite*)coarse->data; 1498 DM_Composite *comfine = (DM_Composite*)fine->data; 1499 1500 PetscFunctionBegin; 1501 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1502 PetscValidHeaderSpecific(fine,DM_CLASSID,2); 1503 nextc = comcoarse->next; 1504 nextf = comfine->next; 1505 /* use global vectors only for determining matrix layout */ 1506 ierr = DMCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr); 1507 ierr = DMCreateGlobalVector(fine,&gfine);CHKERRQ(ierr); 1508 ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr); 1509 ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr); 1510 ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr); 1511 ierr = VecGetSize(gfine,&M);CHKERRQ(ierr); 1512 ierr = 
VecDestroy(gcoarse);CHKERRQ(ierr); 1513 ierr = VecDestroy(gfine);CHKERRQ(ierr); 1514 1515 ierr = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr); 1516 mpack->right = coarse; 1517 mpack->left = fine; 1518 ierr = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr); 1519 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1520 ierr = MatSetType(*A,MATSHELL);CHKERRQ(ierr); 1521 ierr = MatShellSetContext(*A,mpack);CHKERRQ(ierr); 1522 ierr = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr); 1523 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr); 1524 ierr = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr); 1525 ierr = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr); 1526 1527 /* loop over packed objects, handling one at at time */ 1528 while (nextc) { 1529 if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout"); 1530 1531 if (nextc->type == DMCOMPOSITE_ARRAY) { 1532 ; 1533 } else if (nextc->type == DMCOMPOSITE_DM) { 1534 ierr = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr); 1535 nextmat->next = 0; 1536 if (pnextmat) { 1537 pnextmat->next = nextmat; 1538 pnextmat = nextmat; 1539 } else { 1540 pnextmat = nextmat; 1541 mpack->next = nextmat; 1542 } 1543 ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr); 1544 } else { 1545 SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1546 } 1547 nextc = nextc->next; 1548 nextf = nextf->next; 1549 } 1550 PetscFunctionReturn(0); 1551 } 1552 1553 #undef __FUNCT__ 1554 #define __FUNCT__ "DMGetMatrix_Composite_Nest" 1555 static PetscErrorCode DMGetMatrix_Composite_Nest(DM dm,const MatType mtype,Mat *J) 1556 { 1557 1558 PetscFunctionBegin; 1559 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"not implemented"); 1560 
PetscFunctionReturn(0); 1561 } 1562 1563 #undef __FUNCT__ 1564 #define __FUNCT__ "DMGetMatrix_Composite_AIJ" 1565 static PetscErrorCode DMGetMatrix_Composite_AIJ(DM dm,const MatType mtype,Mat *J) 1566 { 1567 PetscErrorCode ierr; 1568 DM_Composite *com = (DM_Composite*)dm->data; 1569 struct DMCompositeLink *next = com->next; 1570 PetscInt m,*dnz,*onz,i,j,mA; 1571 Mat Atmp; 1572 PetscMPIInt rank; 1573 PetscBool dense = PETSC_FALSE; 1574 1575 PetscFunctionBegin; 1576 /* use global vector to determine layout needed for matrix */ 1577 m = com->n; 1578 1579 ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr); 1580 ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 1581 ierr = MatSetType(*J,mtype);CHKERRQ(ierr); 1582 1583 /* 1584 Extremely inefficient but will compute entire Jacobian for testing 1585 */ 1586 ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr); 1587 if (dense) { 1588 PetscInt rstart,rend,*indices; 1589 PetscScalar *values; 1590 1591 mA = com->N; 1592 ierr = MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);CHKERRQ(ierr); 1593 ierr = MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);CHKERRQ(ierr); 1594 1595 ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr); 1596 ierr = PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);CHKERRQ(ierr); 1597 ierr = PetscMemzero(values,mA*sizeof(PetscScalar));CHKERRQ(ierr); 1598 for (i=0; i<mA; i++) indices[i] = i; 1599 for (i=rstart; i<rend; i++) { 1600 ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr); 1601 } 1602 ierr = PetscFree2(values,indices);CHKERRQ(ierr); 1603 ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1604 ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1605 PetscFunctionReturn(0); 1606 } 1607 1608 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1609 ierr = MatPreallocateInitialize(((PetscObject)dm)->comm,m,m,dnz,onz);CHKERRQ(ierr); 1610 /* loop 
over packed objects, handling one at at time */ 1611 next = com->next; 1612 while (next) { 1613 if (next->type == DMCOMPOSITE_ARRAY) { 1614 if (rank == next->rank) { /* zero the "little" block */ 1615 for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) { 1616 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1617 ierr = MatPreallocateSet(j,1,&i,dnz,onz);CHKERRQ(ierr); 1618 } 1619 } 1620 } 1621 } else if (next->type == DMCOMPOSITE_DM) { 1622 PetscInt nc,rstart,*ccols,maxnc; 1623 const PetscInt *cols,*rstarts; 1624 PetscMPIInt proc; 1625 1626 ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr); 1627 ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr); 1628 ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr); 1629 ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr); 1630 1631 maxnc = 0; 1632 for (i=0; i<mA; i++) { 1633 ierr = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1634 ierr = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1635 maxnc = PetscMax(nc,maxnc); 1636 } 1637 ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr); 1638 for (i=0; i<mA; i++) { 1639 ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr); 1640 /* remap the columns taking into how much they are shifted on each process */ 1641 for (j=0; j<nc; j++) { 1642 proc = 0; 1643 while (cols[j] >= rstarts[proc+1]) proc++; 1644 ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc]; 1645 } 1646 ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr); 1647 ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr); 1648 } 1649 ierr = PetscFree(ccols);CHKERRQ(ierr); 1650 ierr = MatDestroy(Atmp);CHKERRQ(ierr); 1651 } else { 1652 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1653 } 1654 next = next->next; 1655 } 1656 if (com->FormCoupleLocations) { 1657 ierr = 
(*com->FormCoupleLocations)(dm,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr); 1658 } 1659 ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr); 1660 ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr); 1661 ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 1662 1663 next = com->next; 1664 while (next) { 1665 if (next->type == DMCOMPOSITE_ARRAY) { 1666 if (rank == next->rank) { 1667 for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) { 1668 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1669 ierr = MatSetValue(*J,j,i,0.0,INSERT_VALUES);CHKERRQ(ierr); 1670 } 1671 } 1672 } 1673 } else if (next->type == DMCOMPOSITE_DM) { 1674 PetscInt nc,rstart,row,maxnc,*ccols; 1675 const PetscInt *cols,*rstarts; 1676 const PetscScalar *values; 1677 PetscMPIInt proc; 1678 1679 ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr); 1680 ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr); 1681 ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr); 1682 ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr); 1683 maxnc = 0; 1684 for (i=0; i<mA; i++) { 1685 ierr = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1686 ierr = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 1687 maxnc = PetscMax(nc,maxnc); 1688 } 1689 ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr); 1690 for (i=0; i<mA; i++) { 1691 ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr); 1692 for (j=0; j<nc; j++) { 1693 proc = 0; 1694 while (cols[j] >= rstarts[proc+1]) proc++; 1695 ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc]; 1696 } 1697 row = com->rstart+next->rstart+i; 1698 ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr); 1699 ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr); 1700 } 1701 ierr = PetscFree(ccols);CHKERRQ(ierr); 1702 ierr = 
MatDestroy(Atmp);CHKERRQ(ierr); 1703 } else { 1704 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1705 } 1706 next = next->next; 1707 } 1708 if (com->FormCoupleLocations) { 1709 PetscInt __rstart; 1710 ierr = MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);CHKERRQ(ierr); 1711 ierr = (*com->FormCoupleLocations)(dm,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);CHKERRQ(ierr); 1712 } 1713 ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1714 ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1715 PetscFunctionReturn(0); 1716 } 1717 1718 #undef __FUNCT__ 1719 #define __FUNCT__ "DMGetMatrix_Composite" 1720 PetscErrorCode PETSCDM_DLLEXPORT DMGetMatrix_Composite(DM dm,const MatType mtype,Mat *J) 1721 { 1722 PetscErrorCode ierr; 1723 PetscBool usenest; 1724 ISLocalToGlobalMapping ltogmap,ltogmapb; 1725 1726 PetscFunctionBegin; 1727 ierr = PetscStrcmp(mtype,MATNEST,&usenest);CHKERRQ(ierr); 1728 if (usenest) { 1729 ierr = DMGetMatrix_Composite_Nest(dm,mtype,J);CHKERRQ(ierr); 1730 } else { 1731 ierr = DMGetMatrix_Composite_AIJ(dm,mtype?mtype:MATAIJ,J);CHKERRQ(ierr); 1732 } 1733 1734 ierr = DMGetLocalToGlobalMapping(dm,<ogmap);CHKERRQ(ierr); 1735 ierr = DMGetLocalToGlobalMappingBlock(dm,<ogmapb);CHKERRQ(ierr); 1736 ierr = MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap);CHKERRQ(ierr); 1737 ierr = MatSetLocalToGlobalMappingBlock(*J,ltogmapb,ltogmapb);CHKERRQ(ierr); 1738 PetscFunctionReturn(0); 1739 } 1740 1741 #undef __FUNCT__ 1742 #define __FUNCT__ "DMCreateLocalToGlobalMapping_Composite" 1743 static PetscErrorCode DMCreateLocalToGlobalMapping_Composite(DM dm) 1744 { 1745 DM_Composite *com = (DM_Composite*)dm->data; 1746 ISLocalToGlobalMapping *ltogs; 1747 PetscInt i,cnt,m,*idx; 1748 PetscErrorCode ierr; 1749 1750 PetscFunctionBegin; 1751 /* Set the ISLocalToGlobalMapping on the new matrix */ 1752 ierr = DMCompositeGetISLocalToGlobalMappings(dm,<ogs);CHKERRQ(ierr); 1753 for (cnt=0,i=0; i<(com->nDM+com->nredundant); i++) { 1754 
ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1755 cnt += m; 1756 } 1757 ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1758 for (cnt=0,i=0; i<(com->nDM+com->nredundant); i++) { 1759 const PetscInt *subidx; 1760 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1761 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1762 ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr); 1763 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1764 cnt += m; 1765 } 1766 ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,cnt,idx,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr); 1767 for (i=0; i<com->nDM+com->nredundant; i++) {ierr = ISLocalToGlobalMappingDestroy(ltogs[i]);CHKERRQ(ierr);} 1768 ierr = PetscFree(ltogs);CHKERRQ(ierr); 1769 PetscFunctionReturn(0); 1770 } 1771 1772 1773 #undef __FUNCT__ 1774 #define __FUNCT__ "DMGetColoring_Composite" 1775 PetscErrorCode PETSCDM_DLLEXPORT DMGetColoring_Composite(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 1776 { 1777 PetscErrorCode ierr; 1778 PetscInt n,i,cnt; 1779 ISColoringValue *colors; 1780 PetscBool dense = PETSC_FALSE; 1781 ISColoringValue maxcol = 0; 1782 DM_Composite *com = (DM_Composite*)dm->data; 1783 1784 PetscFunctionBegin; 1785 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1786 if (ctype == IS_COLORING_GHOSTED) { 1787 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" ); 1788 } else if (ctype == IS_COLORING_GLOBAL) { 1789 n = com->n; 1790 } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType"); 1791 ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in ISColoringDestroy() */ 1792 1793 ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr); 1794 if (dense) { 1795 for (i=0; i<n; i++) { 1796 colors[i] = (ISColoringValue)(com->rstart + 
i); 1797 } 1798 maxcol = com->N; 1799 } else { 1800 struct DMCompositeLink *next = com->next; 1801 PetscMPIInt rank; 1802 1803 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1804 cnt = 0; 1805 while (next) { 1806 if (next->type == DMCOMPOSITE_ARRAY) { 1807 if (rank == next->rank) { /* each column gets is own color */ 1808 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1809 colors[cnt++] = maxcol++; 1810 } 1811 } 1812 ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1813 } else if (next->type == DMCOMPOSITE_DM) { 1814 ISColoring lcoloring; 1815 1816 ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr); 1817 for (i=0; i<lcoloring->N; i++) { 1818 colors[cnt++] = maxcol + lcoloring->colors[i]; 1819 } 1820 maxcol += lcoloring->n; 1821 ierr = ISColoringDestroy(lcoloring);CHKERRQ(ierr); 1822 } else { 1823 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1824 } 1825 next = next->next; 1826 } 1827 } 1828 ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr); 1829 PetscFunctionReturn(0); 1830 } 1831 1832 #undef __FUNCT__ 1833 #define __FUNCT__ "DMGlobalToLocalBegin_Composite" 1834 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalBegin_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1835 { 1836 PetscErrorCode ierr; 1837 struct DMCompositeLink *next; 1838 PetscInt cnt = 3; 1839 PetscMPIInt rank; 1840 PetscScalar *garray,*larray; 1841 DM_Composite *com = (DM_Composite*)dm->data; 1842 1843 PetscFunctionBegin; 1844 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1845 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 1846 next = com->next; 1847 if (!com->setup) { 1848 ierr = DMSetUp(dm);CHKERRQ(ierr); 1849 } 1850 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1851 ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr); 1852 ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr); 1853 1854 /* 
loop over packed objects, handling one at at time */ 1855 while (next) { 1856 if (next->type == DMCOMPOSITE_ARRAY) { 1857 if (rank == next->rank) { 1858 ierr = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr); 1859 garray += next->n; 1860 } 1861 /* does not handle ADD_VALUES */ 1862 ierr = MPI_Bcast(larray,next->nlocal,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1863 } else if (next->type == DMCOMPOSITE_DM) { 1864 Vec local,global; 1865 PetscInt N; 1866 1867 ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr); 1868 ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr); 1869 ierr = VecPlaceArray(global,garray);CHKERRQ(ierr); 1870 ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr); 1871 ierr = VecPlaceArray(local,larray);CHKERRQ(ierr); 1872 ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr); 1873 ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr); 1874 ierr = VecResetArray(global);CHKERRQ(ierr); 1875 ierr = VecResetArray(local);CHKERRQ(ierr); 1876 ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr); 1877 ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr); 1878 } else { 1879 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1880 } 1881 cnt++; 1882 larray += next->nlocal; 1883 next = next->next; 1884 } 1885 1886 ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr); 1887 ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr); 1888 PetscFunctionReturn(0); 1889 } 1890 1891 #undef __FUNCT__ 1892 #define __FUNCT__ "DMGlobalToLocalEnd_Composite" 1893 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalEnd_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1894 { 1895 PetscFunctionBegin; 1896 PetscFunctionReturn(0); 1897 } 1898 1899 EXTERN_C_BEGIN 1900 #undef __FUNCT__ 1901 #define __FUNCT__ "DMCreate_Composite" 1902 PetscErrorCode PETSCDM_DLLEXPORT DMCreate_Composite(DM p) 1903 { 1904 PetscErrorCode ierr; 1905 DM_Composite *com; 1906 1907 
PetscFunctionBegin; 1908 ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr); 1909 p->data = com; 1910 ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr); 1911 com->n = 0; 1912 com->next = PETSC_NULL; 1913 com->nredundant = 0; 1914 com->nDM = 0; 1915 1916 p->ops->createglobalvector = DMCreateGlobalVector_Composite; 1917 p->ops->createlocalvector = DMCreateLocalVector_Composite; 1918 p->ops->createlocaltoglobalmapping = DMCreateLocalToGlobalMapping_Composite; 1919 p->ops->createlocaltoglobalmappingblock = 0; 1920 p->ops->refine = DMRefine_Composite; 1921 p->ops->getinterpolation = DMGetInterpolation_Composite; 1922 p->ops->getmatrix = DMGetMatrix_Composite; 1923 p->ops->getcoloring = DMGetColoring_Composite; 1924 p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Composite; 1925 p->ops->globaltolocalend = DMGlobalToLocalEnd_Composite; 1926 p->ops->destroy = DMDestroy_Composite; 1927 p->ops->view = DMView_Composite; 1928 p->ops->setup = DMSetUp_Composite; 1929 PetscFunctionReturn(0); 1930 } 1931 EXTERN_C_END 1932 1933 #undef __FUNCT__ 1934 #define __FUNCT__ "DMCompositeCreate" 1935 /*@C 1936 DMCompositeCreate - Creates a vector packer, used to generate "composite" 1937 vectors made up of several subvectors. 1938 1939 Collective on MPI_Comm 1940 1941 Input Parameter: 1942 . comm - the processors that will share the global vector 1943 1944 Output Parameters: 1945 . 
packer - the packer object 1946 1947 Level: advanced 1948 1949 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(), 1950 DMCompositeGather(), DMCreateGlobalVector(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess() 1951 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 1952 1953 @*/ 1954 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreate(MPI_Comm comm,DM *packer) 1955 { 1956 PetscErrorCode ierr; 1957 1958 PetscFunctionBegin; 1959 PetscValidPointer(packer,2); 1960 ierr = DMCreate(comm,packer);CHKERRQ(ierr); 1961 ierr = DMSetType(*packer,DMCOMPOSITE);CHKERRQ(ierr); 1962 PetscFunctionReturn(0); 1963 } 1964