1 #define PETSCDM_DLL 2 3 #include "petscdm.h" /*I "petscdm.h" I*/ 4 #include "private/dmimpl.h" 5 #include "petscmat.h" 6 7 /* 8 rstart is where an array/subvector starts in the global parallel vector, so arrays 9 rstarts are meaningless (and set to the previous one) except on the processor where the array lives 10 */ 11 12 typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType; 13 14 struct DMCompositeLink { 15 DMCompositeLinkType type; 16 struct DMCompositeLink *next; 17 PetscInt n,rstart; /* rstart is relative to this process */ 18 PetscInt grstart; /* grstart is relative to all processes */ 19 20 /* only used for DMCOMPOSITE_DM */ 21 PetscInt *grstarts; /* global row for first unknown of this DM on each process */ 22 DM dm; 23 24 /* only used for DMCOMPOSITE_ARRAY */ 25 PetscMPIInt rank; /* process where array unknowns live */ 26 }; 27 28 typedef struct { 29 PetscInt n,N,rstart; /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */ 30 PetscInt nghost; /* number of all local entries include DMDA ghost points and any shared redundant arrays */ 31 PetscInt nDM,nredundant,nmine; /* how many DM's and seperate redundant arrays used to build DM(nmine is ones on this process) */ 32 PetscBool setup; /* after this is set, cannot add new links to the DM*/ 33 struct DMCompositeLink *next; 34 35 PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt); 36 } DM_Composite; 37 38 #undef __FUNCT__ 39 #define __FUNCT__ "DMCompositeSetCoupling" 40 /*@C 41 DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the 42 seperate components (DMDA's and arrays) in a DMto build the correct matrix nonzero structure. 
43 44 45 Logically Collective on MPI_Comm 46 47 Input Parameter: 48 + dm - the composite object 49 - formcouplelocations - routine to set the nonzero locations in the matrix 50 51 Level: advanced 52 53 Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into 54 this routine 55 56 @*/ 57 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt)) 58 { 59 DM_Composite *com = (DM_Composite*)dm->data; 60 61 PetscFunctionBegin; 62 com->FormCoupleLocations = FormCoupleLocations; 63 PetscFunctionReturn(0); 64 } 65 66 #undef __FUNCT__ 67 #define __FUNCT__ "DMCompositeSetContext" 68 /*@ 69 DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they 70 set with DMCompositeSetCoupling() 71 72 73 Not Collective 74 75 Input Parameter: 76 + dm - the composite object 77 - ctx - the user supplied context 78 79 Level: advanced 80 81 Notes: Use DMCompositeGetContext() to retrieve the context when needed. 82 83 @*/ 84 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetContext(DM dm,void *ctx) 85 { 86 PetscFunctionBegin; 87 dm->ctx = ctx; 88 PetscFunctionReturn(0); 89 } 90 91 #undef __FUNCT__ 92 #define __FUNCT__ "DMCompositeGetContext" 93 /*@ 94 DMCompositeGetContext - Access the context set with DMCompositeSetContext() 95 96 97 Not Collective 98 99 Input Parameter: 100 . dm - the composite object 101 102 Output Parameter: 103 . ctx - the user supplied context 104 105 Level: advanced 106 107 Notes: Use DMCompositeGetContext() to retrieve the context when needed. 
108 109 @*/ 110 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetContext(DM dm,void **ctx) 111 { 112 PetscFunctionBegin; 113 *ctx = dm->ctx; 114 PetscFunctionReturn(0); 115 } 116 117 118 119 extern PetscErrorCode DMDestroy_Private(DM,PetscBool *); 120 121 #undef __FUNCT__ 122 #define __FUNCT__ "DMDestroy_Composite" 123 PetscErrorCode PETSCDM_DLLEXPORT DMDestroy_Composite(DM dm) 124 { 125 PetscErrorCode ierr; 126 struct DMCompositeLink *next, *prev; 127 PetscBool done; 128 DM_Composite *com = (DM_Composite *)dm->data; 129 130 PetscFunctionBegin; 131 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 132 ierr = DMDestroy_Private((DM)dm,&done);CHKERRQ(ierr); 133 if (!done) PetscFunctionReturn(0); 134 135 next = com->next; 136 while (next) { 137 prev = next; 138 next = next->next; 139 if (prev->type == DMCOMPOSITE_DM) { 140 ierr = DMDestroy(prev->dm);CHKERRQ(ierr); 141 } 142 if (prev->grstarts) { 143 ierr = PetscFree(prev->grstarts);CHKERRQ(ierr); 144 } 145 ierr = PetscFree(prev);CHKERRQ(ierr); 146 } 147 ierr = PetscFree(com);CHKERRQ(ierr); 148 ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr); 149 PetscFunctionReturn(0); 150 } 151 152 #undef __FUNCT__ 153 #define __FUNCT__ "DMView_Composite" 154 PetscErrorCode PETSCDM_DLLEXPORT DMView_Composite(DM dm,PetscViewer v) 155 { 156 PetscErrorCode ierr; 157 PetscBool iascii; 158 DM_Composite *com = (DM_Composite *)dm->data; 159 160 PetscFunctionBegin; 161 ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 162 if (iascii) { 163 struct DMCompositeLink *lnk = com->next; 164 PetscInt i; 165 166 ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr); 167 ierr = PetscViewerASCIIPrintf(v," contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr); 168 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 169 for (i=0; lnk; lnk=lnk->next,i++) { 170 if (lnk->dm) { 171 ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type 
%s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr); 172 ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr); 173 ierr = DMView(lnk->dm,v);CHKERRQ(ierr); 174 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 175 } else { 176 ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->n,lnk->rank);CHKERRQ(ierr); 177 } 178 } 179 ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr); 180 } 181 PetscFunctionReturn(0); 182 } 183 184 /* --------------------------------------------------------------------------------------*/ 185 #undef __FUNCT__ 186 #define __FUNCT__ "DMSetUp_Composite" 187 PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_Composite(DM dm) 188 { 189 PetscErrorCode ierr; 190 PetscInt nprev = 0; 191 PetscMPIInt rank,size; 192 DM_Composite *com = (DM_Composite*)dm->data; 193 struct DMCompositeLink *next = com->next; 194 PetscLayout map; 195 196 PetscFunctionBegin; 197 if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup"); 198 ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr); 199 ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr); 200 ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr); 201 ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr); 202 ierr = PetscLayoutSetUp(map);CHKERRQ(ierr); 203 ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr); 204 ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr); 205 ierr = PetscLayoutDestroy(map);CHKERRQ(ierr); 206 207 /* now set the rstart for each linked array/vector */ 208 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 209 ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr); 210 while (next) { 211 next->rstart = nprev; 212 if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n; 213 next->grstart = com->rstart + next->rstart; 214 if (next->type == DMCOMPOSITE_ARRAY) { 215 ierr = 
MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 216 } else { 217 ierr = PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr); 218 ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr); 219 } 220 next = next->next; 221 } 222 com->setup = PETSC_TRUE; 223 PetscFunctionReturn(0); 224 } 225 226 227 #undef __FUNCT__ 228 #define __FUNCT__ "DMCompositeGetAccess_Array" 229 PetscErrorCode DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array) 230 { 231 PetscErrorCode ierr; 232 PetscScalar *varray; 233 PetscMPIInt rank; 234 235 PetscFunctionBegin; 236 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 237 if (array) { 238 if (rank == mine->rank) { 239 ierr = VecGetArray(vec,&varray);CHKERRQ(ierr); 240 *array = varray + mine->rstart; 241 ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr); 242 } else { 243 *array = 0; 244 } 245 } 246 PetscFunctionReturn(0); 247 } 248 249 #undef __FUNCT__ 250 #define __FUNCT__ "DMCompositeGetAccess_DM" 251 PetscErrorCode DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global) 252 { 253 PetscErrorCode ierr; 254 PetscScalar *array; 255 256 PetscFunctionBegin; 257 if (global) { 258 ierr = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr); 259 ierr = VecGetArray(vec,&array);CHKERRQ(ierr); 260 ierr = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr); 261 ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr); 262 } 263 PetscFunctionReturn(0); 264 } 265 266 #undef __FUNCT__ 267 #define __FUNCT__ "DMCompositeRestoreAccess_Array" 268 PetscErrorCode DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array) 269 { 270 PetscFunctionBegin; 271 PetscFunctionReturn(0); 272 } 273 274 #undef __FUNCT__ 275 #define __FUNCT__ "DMCompositeRestoreAccess_DM" 276 PetscErrorCode DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec 
*global) 277 { 278 PetscErrorCode ierr; 279 280 PetscFunctionBegin; 281 if (global) { 282 ierr = VecResetArray(*global);CHKERRQ(ierr); 283 ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr); 284 } 285 PetscFunctionReturn(0); 286 } 287 288 #undef __FUNCT__ 289 #define __FUNCT__ "DMCompositeScatter_Array" 290 PetscErrorCode DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array) 291 { 292 PetscErrorCode ierr; 293 PetscScalar *varray; 294 PetscMPIInt rank; 295 296 PetscFunctionBegin; 297 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 298 if (rank == mine->rank) { 299 ierr = VecGetArray(vec,&varray);CHKERRQ(ierr); 300 ierr = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr); 301 ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr); 302 } 303 ierr = MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 304 PetscFunctionReturn(0); 305 } 306 307 #undef __FUNCT__ 308 #define __FUNCT__ "DMCompositeScatter_DM" 309 PetscErrorCode DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local) 310 { 311 PetscErrorCode ierr; 312 PetscScalar *array; 313 Vec global; 314 315 PetscFunctionBegin; 316 ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr); 317 ierr = VecGetArray(vec,&array);CHKERRQ(ierr); 318 ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr); 319 ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr); 320 ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr); 321 ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr); 322 ierr = VecResetArray(global);CHKERRQ(ierr); 323 ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr); 324 PetscFunctionReturn(0); 325 } 326 327 #undef __FUNCT__ 328 #define __FUNCT__ "DMCompositeGather_Array" 329 PetscErrorCode DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,const PetscScalar *array) 330 { 331 
PetscErrorCode ierr; 332 PetscScalar *varray; 333 PetscMPIInt rank; 334 335 PetscFunctionBegin; 336 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 337 if (rank == mine->rank) { 338 ierr = VecGetArray(vec,&varray);CHKERRQ(ierr); 339 if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()"); 340 } 341 switch (imode) { 342 case INSERT_VALUES: 343 if (rank == mine->rank) { 344 ierr = PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr); 345 } 346 break; 347 case ADD_VALUES: { 348 PetscInt i; 349 void *source; 350 PetscScalar *buffer,*dest; 351 if (rank == mine->rank) { 352 dest = &varray[mine->rstart]; 353 #if defined(PETSC_HAVE_MPI_IN_PLACE) 354 buffer = dest; 355 source = MPI_IN_PLACE; 356 #else 357 ierr = PetscMalloc(mine->n*sizeof(PetscScalar),&buffer);CHKERRQ(ierr); 358 source = (void *) buffer; 359 #endif 360 for (i=0; i<mine->n; i++) buffer[i] = varray[mine->rstart+i] + array[i]; 361 } else { 362 source = (void *) array; 363 dest = PETSC_NULL; 364 } 365 ierr = MPI_Reduce(source,dest,mine->n,MPIU_SCALAR,MPI_SUM,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 366 #if !defined(PETSC_HAVE_MPI_IN_PLACE) 367 if (rank == mine->rank) {ierr = PetscFree(source);CHKERRQ(ierr);} 368 #endif 369 } break; 370 default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"imode"); 371 } 372 if (rank == mine->rank) {ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);} 373 PetscFunctionReturn(0); 374 } 375 376 #undef __FUNCT__ 377 #define __FUNCT__ "DMCompositeGather_DM" 378 PetscErrorCode DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,Vec local) 379 { 380 PetscErrorCode ierr; 381 PetscScalar *array; 382 Vec global; 383 384 PetscFunctionBegin; 385 ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr); 386 ierr = VecGetArray(vec,&array);CHKERRQ(ierr); 387 ierr = 
VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr); 388 ierr = DMLocalToGlobalBegin(mine->dm,local,imode,global);CHKERRQ(ierr); 389 ierr = DMLocalToGlobalEnd(mine->dm,local,imode,global);CHKERRQ(ierr); 390 ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr); 391 ierr = VecResetArray(global);CHKERRQ(ierr); 392 ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr); 393 PetscFunctionReturn(0); 394 } 395 396 /* ----------------------------------------------------------------------------------*/ 397 398 #include <stdarg.h> 399 400 #undef __FUNCT__ 401 #define __FUNCT__ "DMCompositeGetNumberDM" 402 /*@C 403 DMCompositeGetNumberDM - Get's the number of DM objects in the DMComposite 404 representation. 405 406 Not Collective 407 408 Input Parameter: 409 . dm - the packer object 410 411 Output Parameter: 412 . nDM - the number of DMs 413 414 Level: beginner 415 416 @*/ 417 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetNumberDM(DM dm,PetscInt *nDM) 418 { 419 DM_Composite *com = (DM_Composite*)dm->data; 420 PetscFunctionBegin; 421 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 422 *nDM = com->nDM; 423 PetscFunctionReturn(0); 424 } 425 426 427 #undef __FUNCT__ 428 #define __FUNCT__ "DMCompositeGetAccess" 429 /*@C 430 DMCompositeGetAccess - Allows one to access the individual packed vectors in their global 431 representation. 432 433 Collective on DMComposite 434 435 Input Parameter: 436 + dm - the packer object 437 . gvec - the global vector 438 - ... - the individual sequential or parallel objects (arrays or vectors) 439 440 Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them 441 442 Level: advanced 443 444 @*/ 445 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetAccess(DM dm,Vec gvec,...) 
446 { 447 va_list Argp; 448 PetscErrorCode ierr; 449 struct DMCompositeLink *next; 450 DM_Composite *com = (DM_Composite*)dm->data; 451 452 PetscFunctionBegin; 453 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 454 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 455 next = com->next; 456 if (!com->setup) { 457 ierr = DMSetUp(dm);CHKERRQ(ierr); 458 } 459 460 /* loop over packed objects, handling one at at time */ 461 va_start(Argp,gvec); 462 while (next) { 463 if (next->type == DMCOMPOSITE_ARRAY) { 464 PetscScalar **array; 465 array = va_arg(Argp, PetscScalar**); 466 ierr = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr); 467 } else if (next->type == DMCOMPOSITE_DM) { 468 Vec *vec; 469 vec = va_arg(Argp, Vec*); 470 ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr); 471 } else { 472 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 473 } 474 next = next->next; 475 } 476 va_end(Argp); 477 PetscFunctionReturn(0); 478 } 479 480 #undef __FUNCT__ 481 #define __FUNCT__ "DMCompositeRestoreAccess" 482 /*@C 483 DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess() 484 representation. 485 486 Collective on DMComposite 487 488 Input Parameter: 489 + dm - the packer object 490 . gvec - the global vector 491 - ... - the individual sequential or parallel objects (arrays or vectors) 492 493 Level: advanced 494 495 .seealso DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 496 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeScatter(), 497 DMCompositeRestoreAccess(), DMCompositeGetAccess() 498 499 @*/ 500 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreAccess(DM dm,Vec gvec,...) 
501 { 502 va_list Argp; 503 PetscErrorCode ierr; 504 struct DMCompositeLink *next; 505 DM_Composite *com = (DM_Composite*)dm->data; 506 507 PetscFunctionBegin; 508 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 509 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 510 next = com->next; 511 if (!com->setup) { 512 ierr = DMSetUp(dm);CHKERRQ(ierr); 513 } 514 515 /* loop over packed objects, handling one at at time */ 516 va_start(Argp,gvec); 517 while (next) { 518 if (next->type == DMCOMPOSITE_ARRAY) { 519 PetscScalar **array; 520 array = va_arg(Argp, PetscScalar**); 521 ierr = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr); 522 } else if (next->type == DMCOMPOSITE_DM) { 523 Vec *vec; 524 vec = va_arg(Argp, Vec*); 525 ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr); 526 } else { 527 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 528 } 529 next = next->next; 530 } 531 va_end(Argp); 532 PetscFunctionReturn(0); 533 } 534 535 #undef __FUNCT__ 536 #define __FUNCT__ "DMCompositeScatter" 537 /*@C 538 DMCompositeScatter - Scatters from a global packed vector into its individual local vectors 539 540 Collective on DMComposite 541 542 Input Parameter: 543 + dm - the packer object 544 . gvec - the global vector 545 - ... - the individual sequential objects (arrays or vectors), PETSC_NULL for those that are not needed 546 547 Level: advanced 548 549 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 550 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 551 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 552 553 @*/ 554 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeScatter(DM dm,Vec gvec,...) 
555 { 556 va_list Argp; 557 PetscErrorCode ierr; 558 struct DMCompositeLink *next; 559 PetscInt cnt; 560 DM_Composite *com = (DM_Composite*)dm->data; 561 562 PetscFunctionBegin; 563 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 564 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 565 if (!com->setup) { 566 ierr = DMSetUp(dm);CHKERRQ(ierr); 567 } 568 569 /* loop over packed objects, handling one at at time */ 570 va_start(Argp,gvec); 571 for (cnt=3,next=com->next; next; cnt++,next=next->next) { 572 if (next->type == DMCOMPOSITE_ARRAY) { 573 PetscScalar *array; 574 array = va_arg(Argp, PetscScalar*); 575 if (!array) continue; 576 PetscValidScalarPointer(array,cnt); 577 ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr); 578 } else if (next->type == DMCOMPOSITE_DM) { 579 Vec vec; 580 vec = va_arg(Argp, Vec); 581 if (!vec) continue; 582 PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt); 583 ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr); 584 } else { 585 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 586 } 587 } 588 va_end(Argp); 589 PetscFunctionReturn(0); 590 } 591 592 #undef __FUNCT__ 593 #define __FUNCT__ "DMCompositeGather" 594 /*@C 595 DMCompositeGather - Gathers into a global packed vector from its individual local vectors 596 597 Collective on DMComposite 598 599 Input Parameter: 600 + dm - the packer object 601 . gvec - the global vector 602 - ... - the individual sequential objects (arrays or vectors), PETSC_NULL for any that are not needed 603 604 Level: advanced 605 606 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 607 DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 608 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 609 610 @*/ 611 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGather(DM dm,Vec gvec,InsertMode imode,...) 
612 { 613 va_list Argp; 614 PetscErrorCode ierr; 615 struct DMCompositeLink *next; 616 DM_Composite *com = (DM_Composite*)dm->data; 617 PetscInt cnt; 618 619 PetscFunctionBegin; 620 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 621 PetscValidHeaderSpecific(gvec,VEC_CLASSID,2); 622 if (!com->setup) { 623 ierr = DMSetUp(dm);CHKERRQ(ierr); 624 } 625 626 /* loop over packed objects, handling one at at time */ 627 va_start(Argp,imode); 628 for (cnt=3,next=com->next; next; cnt++,next=next->next) { 629 if (next->type == DMCOMPOSITE_ARRAY) { 630 PetscScalar *array; 631 array = va_arg(Argp, PetscScalar*); 632 if (!array) continue; 633 PetscValidScalarPointer(array,cnt); 634 ierr = DMCompositeGather_Array(dm,next,gvec,imode,array);CHKERRQ(ierr); 635 } else if (next->type == DMCOMPOSITE_DM) { 636 Vec vec; 637 vec = va_arg(Argp, Vec); 638 if (!vec) continue; 639 PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt); 640 ierr = DMCompositeGather_DM(dm,next,gvec,imode,vec);CHKERRQ(ierr); 641 } else { 642 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 643 } 644 } 645 va_end(Argp); 646 PetscFunctionReturn(0); 647 } 648 649 #undef __FUNCT__ 650 #define __FUNCT__ "DMCompositeAddArray" 651 /*@C 652 DMCompositeAddArray - adds an "redundant" array to a DMComposite. The array values will 653 be stored in part of the array on process orank. 654 655 Collective on DMComposite 656 657 Input Parameter: 658 + dm - the packer object 659 . orank - the process on which the array entries officially live, this number must be 660 the same on all processes. 
661 - n - the length of the array 662 663 Level: advanced 664 665 .seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(), 666 DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 667 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 668 669 @*/ 670 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n) 671 { 672 struct DMCompositeLink *mine,*next; 673 PetscErrorCode ierr; 674 PetscMPIInt rank; 675 DM_Composite *com = (DM_Composite*)dm->data; 676 677 PetscFunctionBegin; 678 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 679 next = com->next; 680 if (com->setup) { 681 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite"); 682 } 683 #if defined(PETSC_USE_DEBUG) 684 { 685 PetscMPIInt orankmax; 686 ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr); 687 if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax); 688 } 689 #endif 690 691 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 692 /* create new link */ 693 ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr); 694 mine->n = n; 695 mine->rank = orank; 696 mine->dm = PETSC_NULL; 697 mine->type = DMCOMPOSITE_ARRAY; 698 mine->next = PETSC_NULL; 699 if (rank == mine->rank) {com->n += n;com->nmine++;} 700 701 /* add to end of list */ 702 if (!next) { 703 com->next = mine; 704 } else { 705 while (next->next) next = next->next; 706 next->next = mine; 707 } 708 com->nredundant++; 709 PetscFunctionReturn(0); 710 } 711 712 #undef __FUNCT__ 713 #define __FUNCT__ "DMCompositeAddDM" 714 /*@C 715 DMCompositeAddDM - adds a DM vector to a DMComposite 716 717 Collective on DMComposite 718 719 Input Parameter: 720 + dm - the packer object 721 - dm - the DM 
object, if the DM is a da you will need to caste it with a (DM) 722 723 Level: advanced 724 725 .seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(), 726 DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 727 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 728 729 @*/ 730 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddDM(DM dmc,DM dm) 731 { 732 PetscErrorCode ierr; 733 PetscInt n; 734 struct DMCompositeLink *mine,*next; 735 Vec global; 736 DM_Composite *com = (DM_Composite*)dmc->data; 737 738 PetscFunctionBegin; 739 PetscValidHeaderSpecific(dmc,DM_CLASSID,1); 740 PetscValidHeaderSpecific(dm,DM_CLASSID,2); 741 next = com->next; 742 if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DM once you have used the DMComposite"); 743 744 /* create new link */ 745 ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr); 746 ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr); 747 ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr); 748 ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr); 749 ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr); 750 mine->n = n; 751 mine->dm = dm; 752 mine->type = DMCOMPOSITE_DM; 753 mine->next = PETSC_NULL; 754 com->n += n; 755 756 /* add to end of list */ 757 if (!next) { 758 com->next = mine; 759 } else { 760 while (next->next) next = next->next; 761 next->next = mine; 762 } 763 com->nDM++; 764 com->nmine++; 765 PetscFunctionReturn(0); 766 } 767 768 extern PetscErrorCode PETSCDM_DLLEXPORT VecView_MPI(Vec,PetscViewer); 769 EXTERN_C_BEGIN 770 #undef __FUNCT__ 771 #define __FUNCT__ "VecView_DMComposite" 772 PetscErrorCode PETSCDM_DLLEXPORT VecView_DMComposite(Vec gvec,PetscViewer viewer) 773 { 774 DM dm; 775 PetscErrorCode ierr; 776 struct DMCompositeLink *next; 777 PetscBool isdraw; 778 DM_Composite *com; 779 780 PetscFunctionBegin; 781 ierr = 
PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr); 782 if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite"); 783 com = (DM_Composite*)dm->data; 784 next = com->next; 785 786 ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 787 if (!isdraw) { 788 /* do I really want to call this? */ 789 ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr); 790 } else { 791 PetscInt cnt = 0; 792 793 /* loop over packed objects, handling one at at time */ 794 while (next) { 795 if (next->type == DMCOMPOSITE_ARRAY) { 796 PetscScalar *array; 797 ierr = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr); 798 799 /*skip it for now */ 800 } else if (next->type == DMCOMPOSITE_DM) { 801 Vec vec; 802 PetscInt bs; 803 804 ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr); 805 ierr = VecView(vec,viewer);CHKERRQ(ierr); 806 ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr); 807 ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr); 808 ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr); 809 cnt += bs; 810 } else { 811 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 812 } 813 next = next->next; 814 } 815 ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr); 816 } 817 PetscFunctionReturn(0); 818 } 819 EXTERN_C_END 820 821 822 #undef __FUNCT__ 823 #define __FUNCT__ "DMCreateGlobalVector_Composite" 824 PetscErrorCode PETSCDM_DLLEXPORT DMCreateGlobalVector_Composite(DM dm,Vec *gvec) 825 { 826 PetscErrorCode ierr; 827 DM_Composite *com = (DM_Composite*)dm->data; 828 829 PetscFunctionBegin; 830 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 831 if (!com->setup) { 832 ierr = DMSetUp(dm);CHKERRQ(ierr); 833 } 834 ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr); 835 ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr); 836 ierr = 
VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr); 837 PetscFunctionReturn(0); 838 } 839 840 #undef __FUNCT__ 841 #define __FUNCT__ "DMCreateLocalVector_Composite" 842 PetscErrorCode PETSCDM_DLLEXPORT DMCreateLocalVector_Composite(DM dm,Vec *lvec) 843 { 844 PetscErrorCode ierr; 845 DM_Composite *com = (DM_Composite*)dm->data; 846 847 PetscFunctionBegin; 848 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 849 if (!com->setup) { 850 ierr = DMSetUp(dm);CHKERRQ(ierr); 851 } 852 ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr); 853 ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr); 854 PetscFunctionReturn(0); 855 } 856 857 #undef __FUNCT__ 858 #define __FUNCT__ "DMCompositeGetISLocalToGlobalMappings" 859 /*@C 860 DMCompositeGetISLocalToGlobalMappings - gets an ISLocalToGlobalMapping for each DM/array in the DMComposite, maps to the composite global space 861 862 Collective on DMComposite 863 864 Input Parameter: 865 . dm - the packer object 866 867 Output Parameters: 868 . ltogs - the individual mappings for each packed vector/array. Note that this includes 869 all the ghost points that individual ghosted DMDA's may have. Also each process has an 870 mapping for EACH redundant array (not just the local redundant arrays). 871 872 Level: advanced 873 874 Notes: 875 Each entry of ltogs should be destroyed with ISLocalToGlobalMappingDestroy(), the ltogs array should be freed with PetscFree(). 
876 877 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 878 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(), 879 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries() 880 881 @*/ 882 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMapping **ltogs) 883 { 884 PetscErrorCode ierr; 885 PetscInt i,*idx,n,cnt; 886 struct DMCompositeLink *next; 887 PetscMPIInt rank; 888 DM_Composite *com = (DM_Composite*)dm->data; 889 890 PetscFunctionBegin; 891 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 892 ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(ISLocalToGlobalMapping),ltogs);CHKERRQ(ierr); 893 next = com->next; 894 ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr); 895 896 /* loop over packed objects, handling one at at time */ 897 cnt = 0; 898 while (next) { 899 if (next->type == DMCOMPOSITE_ARRAY) { 900 ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr); 901 if (rank == next->rank) { 902 for (i=0; i<next->n; i++) idx[i] = next->grstart + i; 903 } 904 ierr = MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 905 ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr); 906 } else if (next->type == DMCOMPOSITE_DM) { 907 ISLocalToGlobalMapping ltog; 908 PetscMPIInt size; 909 const PetscInt *suboff; 910 Vec global; 911 912 /* Get sub-DM global indices for each local dof */ 913 ierr = DMDAGetISLocalToGlobalMapping(next->dm,<og);CHKERRQ(ierr); /* This function should become generic to DM */ 914 ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr); 915 ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr); 916 for (i=0; i<n; i++) idx[i] = i; 917 ierr = ISLocalToGlobalMappingApply(ltog,n,idx,idx);CHKERRQ(ierr); /* This would be nicer with an ISLocalToGlobalMappingGetIndices() */ 918 919 /* Get 
the offsets for the sub-DM global vector */ 920 ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr); 921 ierr = VecGetOwnershipRanges(global,&suboff);CHKERRQ(ierr); 922 ierr = MPI_Comm_size(((PetscObject)global)->comm,&size);CHKERRQ(ierr); 923 924 /* Shift the sub-DM definition of the global space to the composite global space */ 925 for (i=0; i<n; i++) { 926 PetscInt subi = idx[i],lo = 0,hi = size,t; 927 /* Binary search to find which rank owns subi */ 928 while (hi-lo > 1) { 929 t = lo + (hi-lo)/2; 930 if (suboff[t] > subi) hi = t; 931 else lo = t; 932 } 933 idx[i] = subi - suboff[lo] + next->grstarts[lo]; 934 } 935 ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr); 936 ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr); 937 } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 938 next = next->next; 939 cnt++; 940 } 941 PetscFunctionReturn(0); 942 } 943 944 #undef __FUNCT__ 945 #define __FUNCT__ "DMCompositeGetLocalISs" 946 /*@C 947 DMCompositeGetLocalISs - Gets index sets for each DM/array component of a composite local vector 948 949 Not Collective 950 951 Input Arguments: 952 . dm - composite DM 953 954 Output Arguments: 955 . is - array of serial index sets for each each component of the DMComposite 956 957 Level: intermediate 958 959 Notes: 960 At present, a composite local vector does not normally exist. This function is used to provide index sets for 961 MatGetLocalSubMatrix(). In the future, the scatters for each entry in the DMComposite may be be merged into a single 962 scatter to a composite local vector. 963 964 To get the composite global indices at all local points (including ghosts), use DMCompositeGetISLocalToGlobalMappings(). 965 966 To get index sets for pieces of the composite global vector, use DMCompositeGetGlobalISs(). 967 968 Each returned IS should be destroyed with ISDestroy(), the array should be freed with PetscFree(). 
969 970 .seealso: DMCompositeGetGlobalISs(), DMCompositeGetISLocalToGlobalMappings(), MatGetLocalSubMatrix(), MatCreateLocalRef() 971 @*/ 972 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalISs(DM dm,IS **is) 973 { 974 PetscErrorCode ierr; 975 DM_Composite *com = (DM_Composite*)dm->data; 976 struct DMCompositeLink *link; 977 PetscInt cnt,start; 978 979 PetscFunctionBegin; 980 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 981 PetscValidPointer(is,2); 982 ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr); 983 for (cnt=0,start=0,link=com->next; link; start+=link->n,cnt++,link=link->next) { 984 ierr = ISCreateStride(PETSC_COMM_SELF,link->n,start,1,&(*is)[cnt]);CHKERRQ(ierr); 985 if (link->type == DMCOMPOSITE_DM) { 986 Vec lvec; 987 PetscInt bs; 988 ierr = DMGetLocalVector(link->dm,&lvec);CHKERRQ(ierr); 989 ierr = VecGetBlockSize(lvec,&bs);CHKERRQ(ierr); 990 ierr = DMRestoreLocalVector(link->dm,&lvec);CHKERRQ(ierr); 991 ierr = ISSetBlockSize((*is)[cnt],bs);CHKERRQ(ierr); 992 } 993 } 994 PetscFunctionReturn(0); 995 } 996 997 #undef __FUNCT__ 998 #define __FUNCT__ "DMCompositeGetGlobalISs" 999 /*@C 1000 DMCompositeGetGlobalISs - Gets the index sets for each composed object 1001 1002 Collective on DMComposite 1003 1004 Input Parameter: 1005 . dm - the packer object 1006 1007 Output Parameters: 1008 . is - the array of index sets 1009 1010 Level: advanced 1011 1012 Notes: 1013 The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree() 1014 1015 The number of IS on each process will/may be different when redundant arrays are used 1016 1017 These could be used to extract a subset of vector entries for a "multi-physics" preconditioner 1018 1019 Use DMCompositeGetLocalISs() for index sets in the packed local numbering, and 1020 DMCompositeGetISLocalToGlobalMappings() for to map local sub-DM (including ghost) indices to packed global 1021 indices. 

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

@*/

PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalISs(DM dm,IS *is[])
{
  PetscErrorCode         ierr;
  PetscInt               cnt = 0,*idx,i;
  struct DMCompositeLink *next;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* nmine: sub-DMs plus the redundant arrays stored on this process; only those get an IS below */
  ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  while (next) {

    if (next->type == DMCOMPOSITE_ARRAY) {

      if (rank == next->rank) {  /* a redundant array produces an IS only on its owning process */
        ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
        /* PETSC_OWN_POINTER: the IS takes ownership of idx */
        ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
        cnt++;
      }

    } else if (next->type == DMCOMPOSITE_DM) {
      ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
      ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
      cnt++;
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalVectors_Array"
/* Allocate a private scalar array of the link's length (paired with DMCompositeRestoreLocalVectors_Array()) */
PetscErrorCode DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (array) {
    ierr = PetscMalloc(mine->n*sizeof(PetscScalar),array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalVectors_DM"
/* Check out a local work vector from the sub-DM (paired with DMCompositeRestoreLocalVectors_DM()) */
PetscErrorCode DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (local) {
    ierr = DMGetLocalVector(mine->dm,local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreLocalVectors_Array"
/* Free the scalar array obtained with DMCompositeGetLocalVectors_Array() */
PetscErrorCode DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (array) {
    ierr = PetscFree(*array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreLocalVectors_DM"
/* Return the local work vector obtained with DMCompositeGetLocalVectors_DM() */
PetscErrorCode DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (local) {
    ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalVectors"
/*@C
    DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
       Use DMCompositeRestoreLocalVectors() to return them.

    Not Collective

    Input Parameter:
.    dm - the packer object

    Output Parameter:
.   ...
 - the individual sequential objects (arrays or vectors)

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalVectors(DM dm,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  /* loop over packed objects, handling one at a time; the varargs must be supplied in the
     order the objects were added: PetscScalar** for arrays, Vec* for sub-DMs */
  va_start(Argp,dm);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec = va_arg(Argp, Vec*);
      ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreLocalVectors"
/*@C
    DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite.

    Not Collective

    Input Parameter:
.    dm - the packer object

    Output Parameter:
.   ...
 - the individual sequential objects (arrays or vectors)

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreLocalVectors(DM dm,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  /* loop over packed objects, handling one at a time; varargs must match the order used
     in the corresponding DMCompositeGetLocalVectors() call */
  va_start(Argp,dm);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec = va_arg(Argp, Vec*);
      ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetEntries_Array"
/* Report the size of a redundant-array link */
PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n)
{
  PetscFunctionBegin;
  if (n) *n = mine->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetEntries_DM"
/* Hand back the sub-DM of a DM link; no reference is taken, so the caller must not destroy it */
PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm)
{
  PetscFunctionBegin;
  if (dm) *dm = mine->dm;
  PetscFunctionReturn(0);
}
1236 1237 #undef __FUNCT__ 1238 #define __FUNCT__ "DMCompositeGetEntries" 1239 /*@C 1240 DMCompositeGetEntries - Gets the DM, redundant size, etc for each entry in a DMComposite. 1241 1242 Not Collective 1243 1244 Input Parameter: 1245 . dm - the packer object 1246 1247 Output Parameter: 1248 . ... - the individual entries, DMs or integer sizes) 1249 1250 Level: advanced 1251 1252 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(), 1253 DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(), 1254 DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(), DMCompositeScatter(), 1255 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors() 1256 1257 @*/ 1258 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetEntries(DM dm,...) 1259 { 1260 va_list Argp; 1261 PetscErrorCode ierr; 1262 struct DMCompositeLink *next; 1263 DM_Composite *com = (DM_Composite*)dm->data; 1264 1265 PetscFunctionBegin; 1266 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 1267 next = com->next; 1268 /* loop over packed objects, handling one at at time */ 1269 va_start(Argp,dm); 1270 while (next) { 1271 if (next->type == DMCOMPOSITE_ARRAY) { 1272 PetscInt *n; 1273 n = va_arg(Argp, PetscInt*); 1274 ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr); 1275 } else if (next->type == DMCOMPOSITE_DM) { 1276 DM *dmn; 1277 dmn = va_arg(Argp, DM*); 1278 ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr); 1279 } else { 1280 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1281 } 1282 next = next->next; 1283 } 1284 va_end(Argp); 1285 PetscFunctionReturn(0); 1286 } 1287 1288 #undef __FUNCT__ 1289 #define __FUNCT__ "DMRefine_Composite" 1290 PetscErrorCode PETSCDM_DLLEXPORT DMRefine_Composite(DM dmi,MPI_Comm comm,DM *fine) 1291 { 1292 PetscErrorCode ierr; 1293 struct DMCompositeLink *next; 1294 DM_Composite *com = (DM_Composite*)dmi->data; 1295 DM dm; 1296 1297 
PetscFunctionBegin; 1298 PetscValidHeaderSpecific(dmi,DM_CLASSID,1); 1299 next = com->next; 1300 ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr); 1301 1302 /* loop over packed objects, handling one at at time */ 1303 while (next) { 1304 if (next->type == DMCOMPOSITE_ARRAY) { 1305 ierr = DMCompositeAddArray(*fine,next->rank,next->n);CHKERRQ(ierr); 1306 } else if (next->type == DMCOMPOSITE_DM) { 1307 ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr); 1308 ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr); 1309 ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr); 1310 } else { 1311 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1312 } 1313 next = next->next; 1314 } 1315 PetscFunctionReturn(0); 1316 } 1317 1318 #include "petscmat.h" 1319 1320 struct MatPackLink { 1321 Mat A; 1322 struct MatPackLink *next; 1323 }; 1324 1325 struct MatPack { 1326 DM right,left; 1327 struct MatPackLink *next; 1328 }; 1329 1330 #undef __FUNCT__ 1331 #define __FUNCT__ "MatMultBoth_Shell_Pack" 1332 PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool add) 1333 { 1334 struct MatPack *mpack; 1335 struct DMCompositeLink *xnext,*ynext; 1336 struct MatPackLink *anext; 1337 PetscScalar *xarray,*yarray; 1338 PetscErrorCode ierr; 1339 PetscInt i; 1340 Vec xglobal,yglobal; 1341 PetscMPIInt rank; 1342 DM_Composite *comright; 1343 DM_Composite *comleft; 1344 1345 PetscFunctionBegin; 1346 ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr); 1347 ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr); 1348 comright = (DM_Composite*)mpack->right->data; 1349 comleft = (DM_Composite*)mpack->left->data; 1350 xnext = comright->next; 1351 ynext = comleft->next; 1352 anext = mpack->next; 1353 1354 while (xnext) { 1355 if (xnext->type == DMCOMPOSITE_ARRAY) { 1356 if (rank == xnext->rank) { 1357 ierr = VecGetArray(x,&xarray);CHKERRQ(ierr); 1358 ierr = VecGetArray(y,&yarray);CHKERRQ(ierr); 1359 if (add) { 1360 for (i=0; 
i<xnext->n; i++) {
            yarray[ynext->rstart+i] += xarray[xnext->rstart+i];
          }
        } else {
          ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap the packed array slices in the sub-DM's global vectors, then apply the sub-interpolation */
      ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      if (add) {
        ierr = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr);
      } else {
        ierr = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      }
      ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      ierr = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next;  /* interpolation matrices exist only for DM links */
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_Shell_Pack"
/* MatMultAdd for the packed shell matrix; only supports z == y (in-place accumulate) */
PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only");
  ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMult_Shell_Pack"
/* MatMult for the packed shell interpolation matrix */
PetscErrorCode
MatMult_Shell_Pack(Mat A,Vec x,Vec y)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_Shell_Pack"
/* MatMultTranspose for the packed shell matrix: x has the fine (left) layout and y the
   coarse (right) layout, so the roles of comright/comleft are swapped relative to MatMult */
PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft  = (DM_Composite*)mpack->left->data;
  ynext = comright->next;
  xnext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      if (rank == ynext->rank) {  /* copy the redundant array through on the owning process */
        ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
        ierr = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap packed slices in sub-DM global vectors and apply the transpose sub-interpolation */
      ierr = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      ierr = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      ierr = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr =
VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      ierr = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next;  /* interpolation matrices exist only for DM links */
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_Shell_Pack"
/* Destroy the packed shell matrix: free every sub-interpolation matrix and the context list */
PetscErrorCode MatDestroy_Shell_Pack(Mat A)
{
  struct MatPack     *mpack;
  struct MatPackLink *anext,*oldanext;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  anext = mpack->next;

  while (anext) {
    ierr = MatDestroy(anext->A);CHKERRQ(ierr);
    oldanext = anext;
    anext    = anext->next;
    ierr = PetscFree(oldanext);CHKERRQ(ierr);
  }
  ierr = PetscFree(mpack);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetInterpolation_Composite"
/* Create the coarse-to-fine interpolation between two compatible DMComposites as a MATSHELL
   that applies each sub-DM's interpolation to its slice of the packed vectors */
PetscErrorCode PETSCDM_DLLEXPORT DMGetInterpolation_Composite(DM coarse,DM fine,Mat *A,Vec *v)
{
  PetscErrorCode         ierr;
  PetscInt               m,n,M,N;
  struct DMCompositeLink *nextc;
  struct DMCompositeLink *nextf;
  struct MatPackLink     *nextmat,*pnextmat = 0;
  struct MatPack         *mpack;
  Vec                    gcoarse,gfine;
  DM_Composite           *comcoarse = (DM_Composite*)coarse->data;
  DM_Composite           *comfine   = (DM_Composite*)fine->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  PetscValidHeaderSpecific(fine,DM_CLASSID,2);
  nextc = comcoarse->next;
  nextf = comfine->next;
  /* use global vectors only for determining matrix layout */
  ierr =
DMCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(fine,&gfine);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr);
  ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr);
  ierr = VecGetSize(gfine,&M);CHKERRQ(ierr);
  ierr = VecDestroy(gcoarse);CHKERRQ(ierr);
  ierr = VecDestroy(gfine);CHKERRQ(ierr);

  ierr = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr);
  mpack->right = coarse;
  mpack->left  = fine;
  ierr = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATSHELL);CHKERRQ(ierr);
  ierr = MatShellSetContext(*A,mpack);CHKERRQ(ierr);
  ierr = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr);
  ierr = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr);
  ierr = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr);
  ierr = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time; build one interpolation per sub-DM */
  while (nextc) {
    if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout");

    if (nextc->type == DMCOMPOSITE_ARRAY) {
      ;  /* redundant arrays interpolate by identity; handled directly in the MatMult routines */
    } else if (nextc->type == DMCOMPOSITE_DM) {
      ierr = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr);
      nextmat->next = 0;
      if (pnextmat) {     /* append to the tail of the list */
        pnextmat->next = nextmat;
        pnextmat       = nextmat;
      } else {            /* first entry becomes the list head */
        pnextmat    = nextmat;
        mpack->next = nextmat;
      }
      ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    nextc = nextc->next;
    nextf = nextf->next;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetMatrix_Composite"
/* Create and preallocate the composite Jacobian: each sub-DM block is preallocated from the
   sub-DM's own matrix and later filled with zeros so the nonzero pattern is assembled */
PetscErrorCode PETSCDM_DLLEXPORT DMGetMatrix_Composite(DM dm, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscInt               m,*dnz,*onz,i,j,mA;
  Mat                    Atmp;
  PetscMPIInt            rank;
  PetscScalar            zero = 0.0;
  PetscBool              dense = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);

  /* use global vector to determine layout needed for matrix */
  m = com->n;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,MATAIJ);CHKERRQ(ierr); /* NOTE(review): mtype is not used for the outer matrix; AIJ is always created -- confirm intended */

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
  if (dense) {
    PetscInt    rstart,rend,*indices;
    PetscScalar *values;

    mA = com->N;
    ierr = MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);CHKERRQ(ierr);

    ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
    ierr = PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);CHKERRQ(ierr);
    ierr = PetscMemzero(values,mA*sizeof(PetscScalar));CHKERRQ(ierr);
    for (i=0; i<mA; i++) indices[i] = i;
    for (i=rstart; i<rend; i++) {
      ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = PetscFree2(values,indices);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  ierr =
MatPreallocateInitialize(((PetscObject)dm)->comm,m,m,dnz,onz);CHKERRQ(ierr);
  /* first pass over packed objects: compute diagonal/off-diagonal preallocation counts */
  next = com->next;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      if (rank == next->rank) { /* zero the "little" block */
        for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
          for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
            ierr = MatPreallocateSet(j,1,&i,dnz,onz);CHKERRQ(ierr);
          }
        }
      }
    } else if (next->type == DMCOMPOSITE_DM) {
      PetscInt       nc,rstart,*ccols,maxnc;
      const PetscInt *cols,*rstarts;
      PetscMPIInt    proc;

      /* preallocate from the sub-DM's own matrix, with columns remapped to the composite numbering */
      ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
      ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);

      maxnc = 0; /* widest row determines the size of the scratch column buffer */
      for (i=0; i<mA; i++) {
        ierr = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        ierr = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        maxnc = PetscMax(nc,maxnc);
      }
      ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
      for (i=0; i<mA; i++) {
        ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
        /* remap the columns taking into account how much they are shifted on each process */
        for (j=0; j<nc; j++) {
          proc = 0;
          while (cols[j] >= rstarts[proc+1]) proc++;
          ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
        }
        ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr);
        ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree(ccols);CHKERRQ(ierr);
      ierr = MatDestroy(Atmp);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    /* __rstart, __nrows, __start, __end are variables declared by the MatPreallocateInitialize() macro */
    ierr = (*com->FormCoupleLocations)(dm,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr);
  }
  ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* second pass: insert explicit zeros so the preallocated nonzero pattern is assembled */
  next = com->next;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      if (rank == next->rank) {
        for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
          for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
            ierr = MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
    } else if (next->type == DMCOMPOSITE_DM) {
      PetscInt          nc,rstart,row,maxnc,*ccols;
      const PetscInt    *cols,*rstarts;
      const PetscScalar *values;
      PetscMPIInt       proc;

      ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
      ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);
      maxnc = 0;
      for (i=0; i<mA; i++) {
        ierr = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        ierr = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        maxnc = PetscMax(nc,maxnc);
      }
      ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
      for (i=0; i<mA; i++) {
        ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
        for (j=0; j<nc; j++) {
          proc = 0;
          while (cols[j] >= rstarts[proc+1]) proc++;
          ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
        }
        row  = com->rstart+next->rstart+i;
        ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr);
        ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
      }
      ierr = PetscFree(ccols);CHKERRQ(ierr);
      ierr = MatDestroy(Atmp);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    ierr = MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);CHKERRQ(ierr);
    ierr = (*com->FormCoupleLocations)(dm,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGetColoring_Composite"
/* Build a global coloring for the composite matrix by concatenating (with offsets) the
   colorings of the sub-DMs; each redundant-array unknown gets its own color */
PetscErrorCode PETSCDM_DLLEXPORT DMGetColoring_Composite(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
{
  PetscErrorCode  ierr;
  PetscInt        n,i,cnt;
  ISColoringValue *colors;
  PetscBool       dense = PETSC_FALSE;
  ISColoringValue maxcol = 0;
  DM_Composite    *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (ctype == IS_COLORING_GHOSTED) {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" );
  } else if (ctype == IS_COLORING_GLOBAL) {
    n = com->n;
  } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType");
  ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in ISColoringDestroy() */

  ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
  if (dense) {
    /* dense Jacobian: every global column gets its own color */
    for (i=0; i<n; i++) {
      colors[i] = (ISColoringValue)(com->rstart + i);
    }
    maxcol = com->N;
  } else {
    struct DMCompositeLink *next = com->next;
    PetscMPIInt            rank;

    ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
    cnt = 0;
    while (next) {
      if
(next->type == DMCOMPOSITE_ARRAY) {
        if (rank == next->rank) { /* each column gets its own color */
          for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
            colors[cnt++] = maxcol++;
          }
        }
        /* all processes must agree on the running color count */
        ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
      } else if (next->type == DMCOMPOSITE_DM) {
        ISColoring lcoloring;

        /* shift the sub-DM coloring past the colors already consumed */
        ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr);
        for (i=0; i<lcoloring->N; i++) {
          colors[cnt++] = maxcol + lcoloring->colors[i];
        }
        maxcol += lcoloring->n;
        ierr = ISColoringDestroy(lcoloring);CHKERRQ(ierr);
      } else {
        SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
      }
      next = next->next;
    }
  }
  ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Composite"
/* Scatter a packed global vector into a packed local vector, link by link */
PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalBegin_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec)
{
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscInt               cnt = 3; /* NOTE(review): cnt is incremented but never read -- looks vestigial */
  PetscMPIInt            rank;
  PetscScalar            *garray,*larray;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr);
  ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time; garray/larray walk forward through
     the packed storage as each link is processed */
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      if (rank == next->rank) {
        ierr = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr);
garray += next->n; 1808 } 1809 /* does not handle ADD_VALUES */ 1810 ierr = MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr); 1811 larray += next->n; 1812 } else if (next->type == DMCOMPOSITE_DM) { 1813 Vec local,global; 1814 PetscInt N; 1815 1816 ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr); 1817 ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr); 1818 ierr = VecPlaceArray(global,garray);CHKERRQ(ierr); 1819 ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr); 1820 ierr = VecPlaceArray(local,larray);CHKERRQ(ierr); 1821 ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr); 1822 ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr); 1823 ierr = VecResetArray(global);CHKERRQ(ierr); 1824 ierr = VecResetArray(local);CHKERRQ(ierr); 1825 ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr); 1826 ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr); 1827 larray += next->n; 1828 } else { 1829 SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet"); 1830 } 1831 cnt++; 1832 next = next->next; 1833 } 1834 1835 ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr); 1836 ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr); 1837 PetscFunctionReturn(0); 1838 } 1839 1840 #undef __FUNCT__ 1841 #define __FUNCT__ "DMGlobalToLocalEnd_Composite" 1842 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalEnd_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec) 1843 { 1844 PetscFunctionBegin; 1845 PetscFunctionReturn(0); 1846 } 1847 1848 EXTERN_C_BEGIN 1849 #undef __FUNCT__ 1850 #define __FUNCT__ "DMCreate_Composite" 1851 PetscErrorCode PETSCDM_DLLEXPORT DMCreate_Composite(DM p) 1852 { 1853 PetscErrorCode ierr; 1854 DM_Composite *com; 1855 1856 PetscFunctionBegin; 1857 ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr); 1858 p->data = com; 1859 ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr); 1860 com->n = 0; 1861 com->next = PETSC_NULL; 
1862 com->nredundant = 0; 1863 com->nDM = 0; 1864 1865 p->ops->createglobalvector = DMCreateGlobalVector_Composite; 1866 p->ops->createlocalvector = DMCreateLocalVector_Composite; 1867 p->ops->refine = DMRefine_Composite; 1868 p->ops->getinterpolation = DMGetInterpolation_Composite; 1869 p->ops->getmatrix = DMGetMatrix_Composite; 1870 p->ops->getcoloring = DMGetColoring_Composite; 1871 p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Composite; 1872 p->ops->globaltolocalend = DMGlobalToLocalEnd_Composite; 1873 p->ops->destroy = DMDestroy_Composite; 1874 p->ops->view = DMView_Composite; 1875 p->ops->setup = DMSetUp_Composite; 1876 PetscFunctionReturn(0); 1877 } 1878 EXTERN_C_END 1879 1880 #undef __FUNCT__ 1881 #define __FUNCT__ "DMCompositeCreate" 1882 /*@C 1883 DMCompositeCreate - Creates a vector packer, used to generate "composite" 1884 vectors made up of several subvectors. 1885 1886 Collective on MPI_Comm 1887 1888 Input Parameter: 1889 . comm - the processors that will share the global vector 1890 1891 Output Parameters: 1892 . packer - the packer object 1893 1894 Level: advanced 1895 1896 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(), 1897 DMCompositeGather(), DMCreateGlobalVector(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess() 1898 DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries() 1899 1900 @*/ 1901 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreate(MPI_Comm comm,DM *packer) 1902 { 1903 PetscErrorCode ierr; 1904 1905 PetscFunctionBegin; 1906 PetscValidPointer(packer,2); 1907 ierr = DMCreate(comm,packer);CHKERRQ(ierr); 1908 ierr = DMSetType(*packer,DMCOMPOSITE);CHKERRQ(ierr); 1909 PetscFunctionReturn(0); 1910 } 1911