1 2 #include "packimpl.h" /*I "petscdmcomposite.h" I*/ 3 4 #undef __FUNCT__ 5 #define __FUNCT__ "DMCompositeSetCoupling" 6 /*@C 7 DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the 8 seperate components (DMDA's and arrays) in a DMto build the correct matrix nonzero structure. 9 10 11 Logically Collective on MPI_Comm 12 13 Input Parameter: 14 + dm - the composite object 15 - formcouplelocations - routine to set the nonzero locations in the matrix 16 17 Level: advanced 18 19 Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into 20 this routine 21 22 @*/ 23 PetscErrorCode DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt)) 24 { 25 DM_Composite *com = (DM_Composite*)dm->data; 26 27 PetscFunctionBegin; 28 com->FormCoupleLocations = FormCoupleLocations; 29 PetscFunctionReturn(0); 30 } 31 32 #undef __FUNCT__ 33 #define __FUNCT__ "DMCompositeSetContext" 34 /*@ 35 DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they 36 set with DMCompositeSetCoupling() 37 38 39 Not Collective 40 41 Input Parameter: 42 + dm - the composite object 43 - ctx - the user supplied context 44 45 Level: advanced 46 47 Notes: Use DMCompositeGetContext() to retrieve the context when needed. 48 49 @*/ 50 PetscErrorCode DMCompositeSetContext(DM dm,void *ctx) 51 { 52 PetscFunctionBegin; 53 dm->ctx = ctx; 54 PetscFunctionReturn(0); 55 } 56 57 #undef __FUNCT__ 58 #define __FUNCT__ "DMCompositeGetContext" 59 /*@ 60 DMCompositeGetContext - Access the context set with DMCompositeSetContext() 61 62 63 Not Collective 64 65 Input Parameter: 66 . dm - the composite object 67 68 Output Parameter: 69 . ctx - the user supplied context 70 71 Level: advanced 72 73 Notes: Use DMCompositeGetContext() to retrieve the context when needed. 
74 75 @*/ 76 PetscErrorCode DMCompositeGetContext(DM dm,void **ctx) 77 { 78 PetscFunctionBegin; 79 *ctx = dm->ctx; 80 PetscFunctionReturn(0); 81 } 82 83 #undef __FUNCT__ 84 #define __FUNCT__ "DMDestroy_Composite" 85 PetscErrorCode DMDestroy_Composite(DM dm) 86 { 87 PetscErrorCode ierr; 88 struct DMCompositeLink *next, *prev; 89 DM_Composite *com = (DM_Composite *)dm->data; 90 91 PetscFunctionBegin; 92 next = com->next; 93 while (next) { 94 prev = next; 95 next = next->next; 96 if (prev->type == DMCOMPOSITE_DM) { 97 ierr = DMDestroy(&prev->dm);CHKERRQ(ierr); 98 } 99 ierr = PetscFree(prev->grstarts);CHKERRQ(ierr); 100 ierr = PetscFree(prev);CHKERRQ(ierr); 101 } 102 PetscFunctionReturn(0); 103 } 104 105 #undef __FUNCT__ 106 #define __FUNCT__ "DMView_Composite" 107 PetscErrorCode DMView_Composite(DM dm,PetscViewer v) 108 { 109 PetscErrorCode ierr; 110 PetscBool iascii; 111 DM_Composite *com = (DM_Composite *)dm->data; 112 113 PetscFunctionBegin; 114 ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 115 if (iascii) { 116 struct DMCompositeLink *lnk = com->next; 117 PetscInt i; 118 119 ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr); 120 ierr = PetscViewerASCIIPrintf(v," contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr); 121 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 122 for (i=0; lnk; lnk=lnk->next,i++) { 123 if (lnk->dm) { 124 ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr); 125 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 126 ierr = DMView(lnk->dm,v);CHKERRQ(ierr); 127 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 128 } else { 129 ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->nlocal,lnk->rank);CHKERRQ(ierr); 130 } 131 } 132 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 133 } 134 PetscFunctionReturn(0); 135 } 
/* --------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Composite"
/* Computes the packed global layout: total size com->N, this process's offset
   com->rstart, and the per-link offsets rstart (local) and grstart/grstarts (global).
   Redundant-array offsets are broadcast from the owning rank; DM offsets are
   allgathered so every rank knows every rank's start. May be called only once. */
PetscErrorCode  DMSetUp_Composite(DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               nprev = 0;
  PetscMPIInt            rank,size;
  DM_Composite           *com  = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscLayout            map;

  PetscFunctionBegin;
  if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
  ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);

  /* now set the rstart for each linked array/vector */
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr);
  while (next) {
    next->rstart  = nprev;
    nprev        += next->n;
    next->grstart = com->rstart + next->rstart;
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* only the owning rank's grstart is meaningful; share it with everyone */
      ierr = MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    } else {
      ierr = PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr);
      ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    }
    next = next->next;
  }
  com->setup = PETSC_TRUE;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_Array"
/* Gives the owning rank a pointer into the packed vector's storage for this
   redundant-array link; other ranks get *array = 0.
   NOTE(review): *array still points into vec's storage after VecRestoreArray();
   this assumes the underlying array is not invalidated by the restore -- confirm
   for non-native Vec types. */
PetscErrorCode  DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (array) {
    if (rank == mine->rank) {
      ierr   = VecGetArray(vec,&varray);CHKERRQ(ierr);
      *array = varray + mine->rstart;
      ierr   = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
    } else {
      *array = 0;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_DM"
/* Exposes this link's slice of the packed vector as a global vector of the
   sub-DM, by placing the slice's array into a borrowed global vector.
   Must be matched by DMCompositeRestoreAccess_DM(). */
PetscErrorCode  DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (global) {
    ierr = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr);
    ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
    ierr = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_Array"
/* Nothing to undo for array access (the pointer aliased vec's storage). */
PetscErrorCode  DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_DM"
/* Undoes DMCompositeGetAccess_DM(): detach the placed array and return the
   borrowed global vector to the sub-DM. */
PetscErrorCode  DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (global) {
    ierr = VecResetArray(*global);CHKERRQ(ierr);
    ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_Array"
/* Copies this link's slice out of the packed vector on the owning rank, then
   broadcasts it so every rank ends up with the full redundant array. */
PetscErrorCode  DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr =
  MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    ierr = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
  }
  ierr = MPI_Bcast(array,mine->nlocal,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_DM"
/* Scatters this link's slice of the packed vector into the sub-DM's local
   vector (including ghost updates) via a borrowed global vector. */
PetscErrorCode  DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_Array"
/* Gathers a redundant array back into the packed vector on the owning rank.
   INSERT_VALUES copies the owner's array in; ADD_VALUES sums every rank's
   contribution into the owner's slice via MPI_Reduce. */
PetscErrorCode  DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,const PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
  }
  switch (imode) {
  case INSERT_VALUES:
    if (rank == mine->rank) {
      ierr =
PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    }
    break;
  case ADD_VALUES: {
    PetscInt    i;
    void        *source;
    PetscScalar *buffer,*dest;
    if (rank == mine->rank) {
      /* owner folds its current slice plus its own contribution into the reduce */
      dest = &varray[mine->rstart];
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = dest;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc(mine->nlocal*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = (void *) buffer;
#endif
      for (i=0; i<mine->nlocal; i++) buffer[i] = varray[mine->rstart+i] + array[i];
    } else {
      source = (void *) array;
      dest   = PETSC_NULL;
    }
    ierr = MPI_Reduce(source,dest,mine->nlocal,MPIU_SCALAR,MPI_SUM,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == mine->rank) {ierr = PetscFree(source);CHKERRQ(ierr);}
#endif
  } break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"imode");
  }
  if (rank == mine->rank) {ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_DM"
/* Gathers a sub-DM's local vector back into its slice of the packed vector,
   applying imode through the sub-DM's local-to-global scatter. */
PetscErrorCode  DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMLocalToGlobalBegin(mine->dm,local,imode,global);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(mine->dm,local,imode,global);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* ----------------------------------------------------------------------------------*/

#include <stdarg.h>

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetNumberDM"
/*@C
    DMCompositeGetNumberDM - Gets the number of DM objects in the DMComposite
       representation.

    Not Collective

    Input Parameter:
.    dm - the packer object

    Output Parameter:
.    nDM - the number of DMs

    Level: beginner

@*/
PetscErrorCode  DMCompositeGetNumberDM(DM dm,PetscInt *nDM)
{
  DM_Composite *com = (DM_Composite*)dm->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *nDM = com->nDM;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess"
/*@C
    DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them

    Level: advanced

@*/
PetscErrorCode  DMCompositeGetAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; each variadic argument
     must match the corresponding link's type (PetscScalar** or Vec*) */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess"
/*@C
    DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Level: advanced

.seealso  DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeScatter(),
         DMCompositeRestoreAccess(), DMCompositeGetAccess()

@*/
PetscErrorCode  DMCompositeRestoreAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter"
/*@C
    DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors), PETSC_NULL for those that are not needed

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode  DMCompositeScatter(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscInt               cnt;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; cnt starts at 3 because
     the variadic arguments begin at parameter position 3 */
  va_start(Argp,gvec);
  for (cnt=3,next=com->next; next; cnt++,next=next->next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      if (array) PetscValidScalarPointer(array,cnt);
      /* all ranks must agree on whether this entry is skipped */
      PetscValidLogicalCollectiveBool(dm,!!array,cnt);
      if (!array) continue;
      ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      if (!vec) continue;
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather"
/*@C
    DMCompositeGather - Gathers into a global packed vector from its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors), PETSC_NULL for any that are not needed

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode  DMCompositeGather(DM dm,Vec gvec,InsertMode imode,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;
  PetscInt               cnt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time */
  va_start(Argp,imode);
  for (cnt=3,next=com->next; next; cnt++,next=next->next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      if (!array) continue;
      PetscValidScalarPointer(array,cnt);
      ierr = DMCompositeGather_Array(dm,next,gvec,imode,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      if (!vec) continue;
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeGather_DM(dm,next,gvec,imode,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddArray"
/*@C
    DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
       be stored in part of the array on process orank.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    orank - the process on which the array entries officially live, this number must be
             the same on all processes.
-    n - the length of the array

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode  DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n)
{
  struct DMCompositeLink *mine,*next;
  PetscErrorCode         ierr;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  if (com->setup) {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
  }
#if defined(PETSC_USE_DEBUG)
  {
    /* debug-only check that orank matches across the communicator */
    PetscMPIInt orankmax;
    ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr);
    if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
  }
#endif

  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  /* create new link; only the owner contributes n entries to the global size */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  mine->nlocal = n;
  mine->n      = (rank == orank) ?
n : 0;
  mine->rank   = orank;
  mine->dm     = PETSC_NULL;
  mine->type   = DMCOMPOSITE_ARRAY;
  mine->next   = PETSC_NULL;
  if (rank == mine->rank) {com->n += n;com->nmine++;}

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nredundant++;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddDM"
/*@C
    DMCompositeAddDM - adds a DM vector to a DMComposite

    Collective on DMComposite

    Input Parameter:
+    dmc - the packer object
-    dm - the DM object, if the DM is a da you will need to cast it with a (DM)

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode  DMCompositeAddDM(DM dmc,DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               n,nlocal;
  struct DMCompositeLink *mine,*next;
  Vec                    global,local;
  DM_Composite           *com = (DM_Composite*)dmc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  next = com->next;
  if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DM once you have used the DMComposite");

  /* create new link; borrow global/local vectors just to read off the sizes */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = DMGetLocalVector(dm,&local);CHKERRQ(ierr);
  ierr = VecGetSize(local,&nlocal);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(dm,&local);CHKERRQ(ierr);
  mine->n      = n;
  mine->nlocal = nlocal;
  mine->dm     = dm;
  mine->type   = DMCOMPOSITE_DM;
  mine->next   = PETSC_NULL;
  com->n      += n;

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nDM++;
  com->nmine++;
  PetscFunctionReturn(0);
}

extern PetscErrorCode  VecView_MPI(Vec,PetscViewer);
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecView_DMComposite"
/* View routine installed on vectors created by DMCreateGlobalVector_Composite.
   For draw viewers, each sub-DM's piece is viewed separately; other viewers fall
   back to the standard MPI vector view. */
PetscErrorCode  VecView_DMComposite(Vec gvec,PetscViewer viewer)
{
  DM                     dm;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscBool              isdraw;
  DM_Composite           *com;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
  com  = (DM_Composite*)dm->data;
  next = com->next;

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (!isdraw) {
    /* do I really want to call this? */
    ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr);
  } else {
    PetscInt cnt = 0;

    /* loop over packed objects, handling one at a time */
    while (next) {
      if (next->type == DMCOMPOSITE_ARRAY) {
        PetscScalar *array;
        ierr = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr);

        /*skip it for now */
      } else if (next->type == DMCOMPOSITE_DM) {
        Vec      vec;
        PetscInt bs;

        ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        ierr = VecView(vec,viewer);CHKERRQ(ierr);
        ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr);
        ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        /* shift the draw base so the next piece lands in fresh windows */
        ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr);
        cnt += bs;
      } else {
        SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
      }
      next = next->next;
    }
    ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END


#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Composite"
/* Creates the packed global vector (local size com->n, global size com->N) and
   attaches the DMComposite plus the custom view routine to it. */
PetscErrorCode  DMCreateGlobalVector_Composite(DM dm,Vec *gvec)
{
  PetscErrorCode ierr;
  DM_Composite   *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }
  ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
  ierr = VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Composite"
/* Creates the packed local (ghosted) vector of size com->nghost. */
PetscErrorCode  DMCreateLocalVector_Composite(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Composite   *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!com->setup) {
ierr = DMSetUp(dm);CHKERRQ(ierr);
  }
  /* NOTE(review): VecCreateSeq() normally expects a size-1 communicator such as
     PETSC_COMM_SELF; passing the DM's (possibly parallel) communicator here looks
     suspicious for runs on more than one process -- confirm intended behavior */
  ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetISLocalToGlobalMappings"
/*@C
    DMCompositeGetISLocalToGlobalMappings - gets an ISLocalToGlobalMapping for each DM/array in the DMComposite, maps to the composite global space

    Collective on DM

    Input Parameter:
.    dm - the packer object

    Output Parameters:
.    ltogs - the individual mappings for each packed vector/array. Note that this includes
           all the ghost points that individual ghosted DMDA's may have. Also each process has an
           mapping for EACH redundant array (not just the local redundant arrays).

    Level: advanced

    Notes:
       Each entry of ltogs should be destroyed with ISLocalToGlobalMappingDestroy(), the ltogs array should be freed with PetscFree().

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

@*/
PetscErrorCode  DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMapping **ltogs)
{
  PetscErrorCode         ierr;
  PetscInt               i,*idx,n,cnt;
  struct DMCompositeLink *next;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(ISLocalToGlobalMapping),ltogs);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  cnt = 0;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* owner fills the global indices, then every rank receives them */
      ierr = PetscMalloc(next->nlocal*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      if (rank == next->rank) {
        for (i=0; i<next->nlocal; i++) idx[i] = next->grstart + i;
      }
      ierr = MPI_Bcast(idx,next->nlocal,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,next->nlocal,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      ISLocalToGlobalMapping ltog;
      PetscMPIInt            size;
      const PetscInt         *suboff,*indices;
      Vec                    global;

      /* Get sub-DM global indices for each local dof */
      ierr = DMGetLocalToGlobalMapping(next->dm,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetIndices(ltog,&indices);CHKERRQ(ierr);
      ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);

      /* Get the offsets for the sub-DM global vector */
      ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
      ierr = VecGetOwnershipRanges(global,&suboff);CHKERRQ(ierr);
      ierr = MPI_Comm_size(((PetscObject)global)->comm,&size);CHKERRQ(ierr);

      /* Shift the sub-DM definition of the global space to the composite global space */
      for (i=0; i<n; i++) {
        PetscInt subi = indices[i],lo = 0,hi = size,t;
        /* Binary search to find which rank owns subi */
        while (hi-lo > 1) {
          t = lo + (hi-lo)/2;
          if (suboff[t] > subi) hi = t;
          else                  lo = t;
        }
        idx[i] = subi - suboff[lo] + next->grstarts[lo];
      }
      ierr = ISLocalToGlobalMappingRestoreIndices(ltog,&indices);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
    } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    next = next->next;
    cnt++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalISs"
/*@C
    DMCompositeGetLocalISs - Gets index sets for each DM/array component of a composite local vector

    Not Collective

    Input Arguments:
. dm - composite DM

    Output Arguments:
. is - array of serial index sets for each component of the DMComposite

    Level: intermediate

    Notes:
    At present, a composite local vector does not normally exist.  This function is used to provide index sets for
    MatGetLocalSubMatrix().  In the future, the scatters for each entry in the DMComposite may be be merged into a single
    scatter to a composite local vector.

    To get the composite global indices at all local points (including ghosts), use DMCompositeGetISLocalToGlobalMappings().

    To get index sets for pieces of the composite global vector, use DMCompositeGetGlobalISs().

    Each returned IS should be destroyed with ISDestroy(), the array should be freed with PetscFree().
.seealso: DMCompositeGetGlobalISs(), DMCompositeGetISLocalToGlobalMappings(), MatGetLocalSubMatrix(), MatCreateLocalRef()
@*/
PetscErrorCode  DMCompositeGetLocalISs(DM dm,IS **is)
{
  PetscErrorCode         ierr;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *link;
  PetscInt               cnt,start;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(is,2);
  ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
  /* each component occupies a contiguous stride [start, start+nlocal) of the
     packed local numbering */
  for (cnt=0,start=0,link=com->next; link; start+=link->nlocal,cnt++,link=link->next) {
    ierr = ISCreateStride(PETSC_COMM_SELF,link->nlocal,start,1,&(*is)[cnt]);CHKERRQ(ierr);
    if (link->type == DMCOMPOSITE_DM) {
      PetscInt bs;
      ierr = DMGetBlockSize(link->dm,&bs);CHKERRQ(ierr);
      ierr = ISSetBlockSize((*is)[cnt],bs);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetGlobalISs"
/*@C
    DMCompositeGetGlobalISs - Gets the index sets for each composed object

    Collective on DMComposite

    Input Parameter:
.    dm - the packer object

    Output Parameters:
.    is - the array of index sets

    Level: advanced

    Notes:
       The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()

       These could be used to extract a subset of vector entries for a "multi-physics" preconditioner

       Use DMCompositeGetLocalISs() for index sets in the packed local numbering, and
       DMCompositeGetISLocalToGlobalMappings() to map local sub-DM (including ghost) indices to packed global
       indices.

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

@*/

PetscErrorCode  DMCompositeGetGlobalISs(DM dm,IS *is[])
{
  PetscErrorCode         ierr;
  PetscInt               cnt = 0,*idx,i;
  struct DMCompositeLink *next;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(IS),is);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time; each IS covers this
     rank's owned slice [grstart, grstart+n) of the component */
  while (next) {
    ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
    for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
    ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
    cnt++;
    next = next->next;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalVectors_Array"
/* Allocates a work array of length nlocal for a redundant-array link. */
PetscErrorCode  DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (array) {
    ierr = PetscMalloc(mine->nlocal*sizeof(PetscScalar),array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalVectors_DM"
/* Borrows a local vector from the sub-DM for a DM link. */
PetscErrorCode  DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (local) {
    ierr = DMGetLocalVector(mine->dm,local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreLocalVectors_Array"
/* Frees the work array obtained with DMCompositeGetLocalVectors_Array(). */
PetscErrorCode  DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (array) {
    ierr = PetscFree(*array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreLocalVectors_DM"
/* Returns the local vector borrowed with DMCompositeGetLocalVectors_DM(). */
PetscErrorCode  DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (local) {
    ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalVectors"
/*@C
    DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
       Use DMCompositeRestoreLocalVectors() to return them.

    Not Collective

    Input Parameter:
.    dm - the packer object

    Output Parameter:
.   ... - the individual sequential objects (arrays or vectors)

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()

@*/
PetscErrorCode  DMCompositeGetLocalVectors(DM dm,...)
1079 { 1080 va_list Argp; 1081 PetscErrorCode ierr; 1082 struct DMCompositeLink *next; 1083 DM_Composite *com = (DM_Composite*)dm->data; 1084 1085 PetscFunctionBegin; 1086 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1087 next = com->next; 1088 /* loop over packed objects, handling one at at time */ 1089 va_start(Argp,dm); 1090 while (next) { 1091 if (next->type == DMCOMPOSITE_ARRAY) { 1092 PetscScalar **array; 1093 array = va_arg(Argp, PetscScalar**); 1094 ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr); 1095 } else if (next->type == DMCOMPOSITE_DM) { 1096 Vec *vec; 1097 vec = va_arg(Argp, Vec*); 1098 ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr); 1099 } else { 1100 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1101 } 1102 next = next->next; 1103 } 1104 va_end(Argp); 1105 PetscFunctionReturn(0); 1106 } 1107 1108 #undef __FUNCT__ 1109 #define __FUNCT__ "DMCompositeRestoreLocalVectors" 1110 /*@C 1111 DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite. 1112 1113 Not Collective 1114 1115 Input Parameter: 1116 . dm - the packer object 1117 1118 Output Parameter: 1119 . ... - the individual sequential objects (arrays or vectors) 1120 1121 Level: advanced 1122 1123 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1124 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1125 DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries() 1126 1127 @*/ 1128 PetscErrorCode DMCompositeRestoreLocalVectors(DM dm,...) 
1129 { 1130 va_list Argp; 1131 PetscErrorCode ierr; 1132 struct DMCompositeLink *next; 1133 DM_Composite *com = (DM_Composite*)dm->data; 1134 1135 PetscFunctionBegin; 1136 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1137 next = com->next; 1138 /* loop over packed objects, handling one at at time */ 1139 va_start(Argp,dm); 1140 while (next) { 1141 if (next->type == DMCOMPOSITE_ARRAY) { 1142 PetscScalar **array; 1143 array = va_arg(Argp, PetscScalar**); 1144 ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr); 1145 } else if (next->type == DMCOMPOSITE_DM) { 1146 Vec *vec; 1147 vec = va_arg(Argp, Vec*); 1148 ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr); 1149 } else { 1150 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1151 } 1152 next = next->next; 1153 } 1154 va_end(Argp); 1155 PetscFunctionReturn(0); 1156 } 1157 1158 /* -------------------------------------------------------------------------------------*/ 1159 #undef __FUNCT__ 1160 #define __FUNCT__ "DMCompositeGetEntries_Array" 1161 PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n) 1162 { 1163 PetscFunctionBegin; 1164 if (n) *n = mine->nlocal; 1165 PetscFunctionReturn(0); 1166 } 1167 1168 #undef __FUNCT__ 1169 #define __FUNCT__ "DMCompositeGetEntries_DM" 1170 PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm) 1171 { 1172 PetscFunctionBegin; 1173 if (dm) *dm = mine->dm; 1174 PetscFunctionReturn(0); 1175 } 1176 1177 #undef __FUNCT__ 1178 #define __FUNCT__ "DMCompositeGetEntries" 1179 /*@C 1180 DMCompositeGetEntries - Gets the DM, redundant size, etc for each entry in a DMComposite. 1181 1182 Not Collective 1183 1184 Input Parameter: 1185 . dm - the packer object 1186 1187 Output Parameter: 1188 . ... 
- the individual entries, DMs or integer sizes) 1189 1190 Level: advanced 1191 1192 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1193 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1194 DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(), DMCompositeScatter(), 1195 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors() 1196 1197 @*/ 1198 PetscErrorCode DMCompositeGetEntries(DM dm,...) 1199 { 1200 va_list Argp; 1201 PetscErrorCode ierr; 1202 struct DMCompositeLink *next; 1203 DM_Composite *com = (DM_Composite*)dm->data; 1204 1205 PetscFunctionBegin; 1206 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1207 next = com->next; 1208 /* loop over packed objects, handling one at at time */ 1209 va_start(Argp,dm); 1210 while (next) { 1211 if (next->type == DMCOMPOSITE_ARRAY) { 1212 PetscInt *n; 1213 n = va_arg(Argp, PetscInt*); 1214 ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr); 1215 } else if (next->type == DMCOMPOSITE_DM) { 1216 DM *dmn; 1217 dmn = va_arg(Argp, DM*); 1218 ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr); 1219 } else { 1220 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1221 } 1222 next = next->next; 1223 } 1224 va_end(Argp); 1225 PetscFunctionReturn(0); 1226 } 1227 1228 #undef __FUNCT__ 1229 #define __FUNCT__ "DMRefine_Composite" 1230 PetscErrorCode DMRefine_Composite(DM dmi,MPI_Comm comm,DM *fine) 1231 { 1232 PetscErrorCode ierr; 1233 struct DMCompositeLink *next; 1234 DM_Composite *com = (DM_Composite*)dmi->data; 1235 DM dm; 1236 1237 PetscFunctionBegin; 1238 PetscValidHeaderSpecific(dmi,DM_CLASSID,1); 1239 next = com->next; 1240 ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr); 1241 1242 /* loop over packed objects, handling one at at time */ 1243 while (next) { 1244 if (next->type == DMCOMPOSITE_ARRAY) { 1245 ierr = 
DMCompositeAddArray(*fine,next->rank,next->nlocal);CHKERRQ(ierr); 1246 } else if (next->type == DMCOMPOSITE_DM) { 1247 ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr); 1248 ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr); 1249 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 1250 } else { 1251 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1252 } 1253 next = next->next; 1254 } 1255 PetscFunctionReturn(0); 1256 } 1257 1258 #include <petscmat.h> 1259 1260 struct MatPackLink { 1261 Mat A; 1262 struct MatPackLink *next; 1263 }; 1264 1265 struct MatPack { 1266 DM right,left; 1267 struct MatPackLink *next; 1268 }; 1269 1270 #undef __FUNCT__ 1271 #define __FUNCT__ "MatMultBoth_Shell_Pack" 1272 PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool add) 1273 { 1274 struct MatPack *mpack; 1275 struct DMCompositeLink *xnext,*ynext; 1276 struct MatPackLink *anext; 1277 PetscScalar *xarray,*yarray; 1278 PetscErrorCode ierr; 1279 PetscInt i; 1280 Vec xglobal,yglobal; 1281 PetscMPIInt rank; 1282 DM_Composite *comright; 1283 DM_Composite *comleft; 1284 1285 PetscFunctionBegin; 1286 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1287 ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr); 1288 comright = (DM_Composite*)mpack->right->data; 1289 comleft = (DM_Composite*)mpack->left->data; 1290 xnext = comright->next; 1291 ynext = comleft->next; 1292 anext = mpack->next; 1293 1294 while (xnext) { 1295 if (xnext->type == DMCOMPOSITE_ARRAY) { 1296 if (rank == xnext->rank) { 1297 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1298 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1299 if (add) { 1300 for (i=0; i<xnext->n; i++) { 1301 yarray[ynext->rstart+i] += xarray[xnext->rstart+i]; 1302 } 1303 } else { 1304 ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr); 1305 } 1306 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1307 ierr = 
VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1308 } 1309 } else if (xnext->type == DMCOMPOSITE_DM) { 1310 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1311 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1312 ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1313 ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1314 ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr); 1315 ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr); 1316 if (add) { 1317 ierr = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr); 1318 } else { 1319 ierr = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr); 1320 } 1321 ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr); 1322 ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr); 1323 ierr = VecResetArray(xglobal);CHKERRQ(ierr); 1324 ierr = VecResetArray(yglobal);CHKERRQ(ierr); 1325 ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr); 1326 ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr); 1327 anext = anext->next; 1328 } else { 1329 SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1330 } 1331 xnext = xnext->next; 1332 ynext = ynext->next; 1333 } 1334 PetscFunctionReturn(0); 1335 } 1336 1337 #undef __FUNCT__ 1338 #define __FUNCT__ "MatMultAdd_Shell_Pack" 1339 PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z) 1340 { 1341 PetscErrorCode ierr; 1342 PetscFunctionBegin; 1343 if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only"); 1344 ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr); 1345 PetscFunctionReturn(0); 1346 } 1347 1348 #undef __FUNCT__ 1349 #define __FUNCT__ "MatMult_Shell_Pack" 1350 PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y) 1351 { 1352 PetscErrorCode ierr; 1353 PetscFunctionBegin; 1354 ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr); 1355 PetscFunctionReturn(0); 1356 } 1357 1358 #undef __FUNCT__ 1359 #define __FUNCT__ "MatMultTranspose_Shell_Pack" 1360 PetscErrorCode 
MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft  = (DM_Composite*)mpack->left->data;
  /* transpose action: x is in the left (fine) space, y in the right (coarse) space */
  ynext = comright->next;
  xnext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      if (rank == ynext->rank) {
        ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
        ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* alias the packed slices as sub-DM global vectors and apply A^T */
      ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      ierr = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      ierr = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next;
    } else {
SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1407 } 1408 xnext = xnext->next; 1409 ynext = ynext->next; 1410 } 1411 PetscFunctionReturn(0); 1412 } 1413 1414 #undef __FUNCT__ 1415 #define __FUNCT__ "MatDestroy_Shell_Pack" 1416 PetscErrorCode MatDestroy_Shell_Pack(Mat A) 1417 { 1418 struct MatPack *mpack; 1419 struct MatPackLink *anext,*oldanext; 1420 PetscErrorCode ierr; 1421 1422 PetscFunctionBegin; 1423 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1424 anext = mpack->next; 1425 1426 while (anext) { 1427 ierr = MatDestroy(&anext->A);CHKERRQ(ierr); 1428 oldanext = anext; 1429 anext = anext->next; 1430 ierr = PetscFree(oldanext);CHKERRQ(ierr); 1431 } 1432 ierr = PetscFree(mpack);CHKERRQ(ierr); 1433 ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr); 1434 PetscFunctionReturn(0); 1435 } 1436 1437 #undef __FUNCT__ 1438 #define __FUNCT__ "DMGetInterpolation_Composite" 1439 PetscErrorCode DMGetInterpolation_Composite(DM coarse,DM fine,Mat *A,Vec *v) 1440 { 1441 PetscErrorCode ierr; 1442 PetscInt m,n,M,N; 1443 struct DMCompositeLink *nextc; 1444 struct DMCompositeLink *nextf; 1445 struct MatPackLink *nextmat,*pnextmat = 0; 1446 struct MatPack *mpack; 1447 Vec gcoarse,gfine; 1448 DM_Composite *comcoarse = (DM_Composite*)coarse->data; 1449 DM_Composite *comfine = (DM_Composite*)fine->data; 1450 1451 PetscFunctionBegin; 1452 PetscValidHeaderSpecific(coarse,DM_CLASSID,1); 1453 PetscValidHeaderSpecific(fine,DM_CLASSID,2); 1454 nextc = comcoarse->next; 1455 nextf = comfine->next; 1456 /* use global vectors only for determining matrix layout */ 1457 ierr = DMCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr); 1458 ierr = DMCreateGlobalVector(fine,&gfine);CHKERRQ(ierr); 1459 ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr); 1460 ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr); 1461 ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr); 1462 ierr = VecGetSize(gfine,&M);CHKERRQ(ierr); 1463 ierr = 
VecDestroy(&gcoarse);CHKERRQ(ierr); 1464 ierr = VecDestroy(&gfine);CHKERRQ(ierr); 1465 1466 ierr = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr); 1467 mpack->right = coarse; 1468 mpack->left = fine; 1469 ierr = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr); 1470 ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 1471 ierr = MatSetType(*A,MATSHELL);CHKERRQ(ierr); 1472 ierr = MatShellSetContext(*A,mpack);CHKERRQ(ierr); 1473 ierr = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr); 1474 ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr); 1475 ierr = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr); 1476 ierr = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr); 1477 1478 /* loop over packed objects, handling one at at time */ 1479 while (nextc) { 1480 if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout"); 1481 1482 if (nextc->type == DMCOMPOSITE_ARRAY) { 1483 ; 1484 } else if (nextc->type == DMCOMPOSITE_DM) { 1485 ierr = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr); 1486 nextmat->next = 0; 1487 if (pnextmat) { 1488 pnextmat->next = nextmat; 1489 pnextmat = nextmat; 1490 } else { 1491 pnextmat = nextmat; 1492 mpack->next = nextmat; 1493 } 1494 ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr); 1495 } else { 1496 SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1497 } 1498 nextc = nextc->next; 1499 nextf = nextf->next; 1500 } 1501 PetscFunctionReturn(0); 1502 } 1503 1504 #undef __FUNCT__ 1505 #define __FUNCT__ "DMCreateLocalToGlobalMapping_Composite" 1506 static PetscErrorCode DMCreateLocalToGlobalMapping_Composite(DM dm) 1507 { 1508 DM_Composite *com = (DM_Composite*)dm->data; 1509 ISLocalToGlobalMapping *ltogs; 1510 PetscInt i,cnt,m,*idx; 1511 
PetscErrorCode ierr; 1512 1513 PetscFunctionBegin; 1514 /* Set the ISLocalToGlobalMapping on the new matrix */ 1515 ierr = DMCompositeGetISLocalToGlobalMappings(dm,<ogs);CHKERRQ(ierr); 1516 for (cnt=0,i=0; i<(com->nDM+com->nredundant); i++) { 1517 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1518 cnt += m; 1519 } 1520 ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1521 for (cnt=0,i=0; i<(com->nDM+com->nredundant); i++) { 1522 const PetscInt *subidx; 1523 ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr); 1524 ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1525 ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr); 1526 ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr); 1527 cnt += m; 1528 } 1529 ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,cnt,idx,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr); 1530 for (i=0; i<com->nDM+com->nredundant; i++) {ierr = ISLocalToGlobalMappingDestroy(<ogs[i]);CHKERRQ(ierr);} 1531 ierr = PetscFree(ltogs);CHKERRQ(ierr); 1532 PetscFunctionReturn(0); 1533 } 1534 1535 1536 #undef __FUNCT__ 1537 #define __FUNCT__ "DMGetColoring_Composite" 1538 PetscErrorCode DMGetColoring_Composite(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring) 1539 { 1540 PetscErrorCode ierr; 1541 PetscInt n,i,cnt; 1542 ISColoringValue *colors; 1543 PetscBool dense = PETSC_FALSE; 1544 ISColoringValue maxcol = 0; 1545 DM_Composite *com = (DM_Composite*)dm->data; 1546 1547 PetscFunctionBegin; 1548 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1549 if (ctype == IS_COLORING_GHOSTED) { 1550 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" ); 1551 } else if (ctype == IS_COLORING_GLOBAL) { 1552 n = com->n; 1553 } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType"); 1554 ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in 
ISColoringDestroy() */ 1555 1556 ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr); 1557 if (dense) { 1558 for (i=0; i<n; i++) { 1559 colors[i] = (ISColoringValue)(com->rstart + i); 1560 } 1561 maxcol = com->N; 1562 } else { 1563 struct DMCompositeLink *next = com->next; 1564 PetscMPIInt rank; 1565 1566 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1567 cnt = 0; 1568 while (next) { 1569 if (next->type == DMCOMPOSITE_ARRAY) { 1570 if (rank == next->rank) { /* each column gets is own color */ 1571 for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) { 1572 colors[cnt++] = maxcol++; 1573 } 1574 } 1575 ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1576 } else if (next->type == DMCOMPOSITE_DM) { 1577 ISColoring lcoloring; 1578 1579 ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr); 1580 for (i=0; i<lcoloring->N; i++) { 1581 colors[cnt++] = maxcol + lcoloring->colors[i]; 1582 } 1583 maxcol += lcoloring->n; 1584 ierr = ISColoringDestroy(&lcoloring);CHKERRQ(ierr); 1585 } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1586 next = next->next; 1587 } 1588 } 1589 ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr); 1590 PetscFunctionReturn(0); 1591 } 1592 1593 #undef __FUNCT__ 1594 #define __FUNCT__ "DMGlobalToLocalBegin_Composite" 1595 PetscErrorCode DMGlobalToLocalBegin_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1596 { 1597 PetscErrorCode ierr; 1598 struct DMCompositeLink *next; 1599 PetscInt cnt = 3; 1600 PetscMPIInt rank; 1601 PetscScalar *garray,*larray; 1602 DM_Composite *com = (DM_Composite*)dm->data; 1603 1604 PetscFunctionBegin; 1605 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1606 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 1607 next = com->next; 1608 if (!com->setup) { 1609 ierr = DMSetUp(dm);CHKERRQ(ierr); 
1610 } 1611 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 1612 ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr); 1613 ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr); 1614 1615 /* loop over packed objects, handling one at at time */ 1616 while (next) { 1617 if (next->type == DMCOMPOSITE_ARRAY) { 1618 if (rank == next->rank) { 1619 ierr = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr); 1620 garray += next->n; 1621 } 1622 /* does not handle ADD_VALUES */ 1623 ierr = MPI_Bcast(larray,next->nlocal,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1624 } else if (next->type == DMCOMPOSITE_DM) { 1625 Vec local,global; 1626 PetscInt N; 1627 1628 ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr); 1629 ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr); 1630 ierr = VecPlaceArray(global,garray);CHKERRQ(ierr); 1631 ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr); 1632 ierr = VecPlaceArray(local,larray);CHKERRQ(ierr); 1633 ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr); 1634 ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr); 1635 ierr = VecResetArray(global);CHKERRQ(ierr); 1636 ierr = VecResetArray(local);CHKERRQ(ierr); 1637 ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr); 1638 ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr); 1639 } else { 1640 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1641 } 1642 cnt++; 1643 larray += next->nlocal; 1644 next = next->next; 1645 } 1646 1647 ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr); 1648 ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr); 1649 PetscFunctionReturn(0); 1650 } 1651 1652 #undef __FUNCT__ 1653 #define __FUNCT__ "DMGlobalToLocalEnd_Composite" 1654 PetscErrorCode DMGlobalToLocalEnd_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1655 { 1656 PetscFunctionBegin; 1657 PetscFunctionReturn(0); 1658 } 1659 1660 EXTERN_C_BEGIN 1661 #undef __FUNCT__ 1662 
#define __FUNCT__ "DMCreate_Composite" 1663 PetscErrorCode DMCreate_Composite(DM p) 1664 { 1665 PetscErrorCode ierr; 1666 DM_Composite *com; 1667 1668 PetscFunctionBegin; 1669 ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr); 1670 p->data = com; 1671 ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr); 1672 com->n = 0; 1673 com->next = PETSC_NULL; 1674 com->nredundant = 0; 1675 com->nDM = 0; 1676 1677 p->ops->createglobalvector = DMCreateGlobalVector_Composite; 1678 p->ops->createlocalvector = DMCreateLocalVector_Composite; 1679 p->ops->createlocaltoglobalmapping = DMCreateLocalToGlobalMapping_Composite; 1680 p->ops->createlocaltoglobalmappingblock = 0; 1681 p->ops->refine = DMRefine_Composite; 1682 p->ops->getinterpolation = DMGetInterpolation_Composite; 1683 p->ops->getmatrix = DMGetMatrix_Composite; 1684 p->ops->getcoloring = DMGetColoring_Composite; 1685 p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Composite; 1686 p->ops->globaltolocalend = DMGlobalToLocalEnd_Composite; 1687 p->ops->destroy = DMDestroy_Composite; 1688 p->ops->view = DMView_Composite; 1689 p->ops->setup = DMSetUp_Composite; 1690 PetscFunctionReturn(0); 1691 } 1692 EXTERN_C_END 1693 1694 #undef __FUNCT__ 1695 #define __FUNCT__ "DMCompositeCreate" 1696 /*@C 1697 DMCompositeCreate - Creates a vector packer, used to generate "composite" 1698 vectors made up of several subvectors. 1699 1700 Collective on MPI_Comm 1701 1702 Input Parameter: 1703 . comm - the processors that will share the global vector 1704 1705 Output Parameters: 1706 . 
packer - the packer object 1707 1708 Level: advanced 1709 1710 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(), 1711 DMCompositeGather(), DMCreateGlobalVector(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess() 1712 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 1713 1714 @*/ 1715 PetscErrorCode DMCompositeCreate(MPI_Comm comm,DM *packer) 1716 { 1717 PetscErrorCode ierr; 1718 1719 PetscFunctionBegin; 1720 PetscValidPointer(packer,2); 1721 ierr = DMCreate(comm,packer);CHKERRQ(ierr); 1722 ierr = DMSetType(*packer,DMCOMPOSITE);CHKERRQ(ierr); 1723 PetscFunctionReturn(0); 1724 } 1725