1 2 /* 3 Provides utility routines for split MPI communicator. 4 */ 5 #include <petscsys.h> /*I "petscsys.h" I*/ 6 #include <petscviewer.h> 7 8 const char *const PetscSubcommTypes[] = {"GENERAL","CONTIGUOUS","INTERLACED","PetscSubcommType","PETSC_SUBCOMM_",NULL}; 9 10 static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm); 11 static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm); 12 13 /*@ 14 PetscSubcommSetFromOptions - Allows setting options from a PetscSubcomm 15 16 Collective on PetscSubcomm 17 18 Input Parameter: 19 . psubcomm - PetscSubcomm context 20 21 Level: beginner 22 23 @*/ 24 PetscErrorCode PetscSubcommSetFromOptions(PetscSubcomm psubcomm) 25 { 26 PetscErrorCode ierr; 27 PetscSubcommType type; 28 PetscBool flg; 29 30 PetscFunctionBegin; 31 PetscCheckFalse(!psubcomm,PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"Must call PetscSubcommCreate firt"); 32 33 ierr = PetscOptionsBegin(psubcomm->parent,psubcomm->subcommprefix,"Options for PetscSubcomm",NULL);CHKERRQ(ierr); 34 CHKERRQ(PetscOptionsEnum("-psubcomm_type",NULL,NULL,PetscSubcommTypes,(PetscEnum)psubcomm->type,(PetscEnum*)&type,&flg)); 35 if (flg && psubcomm->type != type) { 36 /* free old structures */ 37 CHKERRQ(PetscCommDestroy(&(psubcomm)->dupparent)); 38 CHKERRQ(PetscCommDestroy(&(psubcomm)->child)); 39 CHKERRQ(PetscFree((psubcomm)->subsize)); 40 switch (type) { 41 case PETSC_SUBCOMM_GENERAL: 42 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Runtime option PETSC_SUBCOMM_GENERAL is not supported, use PetscSubcommSetTypeGeneral()"); 43 case PETSC_SUBCOMM_CONTIGUOUS: 44 CHKERRQ(PetscSubcommCreate_contiguous(psubcomm)); 45 break; 46 case PETSC_SUBCOMM_INTERLACED: 47 CHKERRQ(PetscSubcommCreate_interlaced(psubcomm)); 48 break; 49 default: 50 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[type]); 51 } 52 } 53 54 CHKERRQ(PetscOptionsName("-psubcomm_view","Triggers display of PetscSubcomm context","PetscSubcommView",&flg)); 55 if (flg) { 56 
CHKERRQ(PetscSubcommView(psubcomm,PETSC_VIEWER_STDOUT_(psubcomm->parent))); 57 } 58 ierr = PetscOptionsEnd();CHKERRQ(ierr); 59 PetscFunctionReturn(0); 60 } 61 62 /*@C 63 PetscSubcommSetOptionsPrefix - Sets the prefix used for searching for all 64 PetscSubcomm items in the options database. 65 66 Logically collective on PetscSubcomm. 67 68 Level: Intermediate 69 70 Input Parameters: 71 + psubcomm - PetscSubcomm context 72 - prefix - the prefix to prepend all PetscSubcomm item names with. 73 74 @*/ 75 PetscErrorCode PetscSubcommSetOptionsPrefix(PetscSubcomm psubcomm,const char pre[]) 76 { 77 PetscFunctionBegin; 78 if (!pre) { 79 CHKERRQ(PetscFree(psubcomm->subcommprefix)); 80 } else { 81 PetscCheckFalse(pre[0] == '-',PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Options prefix should not begin with a hyphen"); 82 CHKERRQ(PetscFree(psubcomm->subcommprefix)); 83 CHKERRQ(PetscStrallocpy(pre,&(psubcomm->subcommprefix))); 84 } 85 PetscFunctionReturn(0); 86 } 87 88 /*@C 89 PetscSubcommView - Views a PetscSubcomm of values as either ASCII text or a binary file 90 91 Collective on PetscSubcomm 92 93 Input Parameters: 94 + psubcomm - PetscSubcomm context 95 - viewer - location to view the values 96 97 Level: beginner 98 @*/ 99 PetscErrorCode PetscSubcommView(PetscSubcomm psubcomm,PetscViewer viewer) 100 { 101 PetscBool iascii; 102 PetscViewerFormat format; 103 104 PetscFunctionBegin; 105 CHKERRQ(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii)); 106 if (iascii) { 107 CHKERRQ(PetscViewerGetFormat(viewer,&format)); 108 if (format == PETSC_VIEWER_DEFAULT) { 109 MPI_Comm comm=psubcomm->parent; 110 PetscMPIInt rank,size,subsize,subrank,duprank; 111 112 CHKERRMPI(MPI_Comm_size(comm,&size)); 113 CHKERRQ(PetscViewerASCIIPrintf(viewer,"PetscSubcomm type %s with total %d MPI processes:\n",PetscSubcommTypes[psubcomm->type],size)); 114 CHKERRMPI(MPI_Comm_rank(comm,&rank)); 115 CHKERRMPI(MPI_Comm_size(psubcomm->child,&subsize)); 116 
CHKERRMPI(MPI_Comm_rank(psubcomm->child,&subrank)); 117 CHKERRMPI(MPI_Comm_rank(psubcomm->dupparent,&duprank)); 118 CHKERRQ(PetscViewerASCIIPushSynchronized(viewer)); 119 CHKERRQ(PetscViewerASCIISynchronizedPrintf(viewer," [%d], color %d, sub-size %d, sub-rank %d, duprank %d\n",rank,psubcomm->color,subsize,subrank,duprank)); 120 CHKERRQ(PetscViewerFlush(viewer)); 121 CHKERRQ(PetscViewerASCIIPopSynchronized(viewer)); 122 } 123 } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported yet"); 124 PetscFunctionReturn(0); 125 } 126 127 /*@ 128 PetscSubcommSetNumber - Set total number of subcommunicators. 129 130 Collective 131 132 Input Parameters: 133 + psubcomm - PetscSubcomm context 134 - nsubcomm - the total number of subcommunicators in psubcomm 135 136 Level: advanced 137 138 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetType(),PetscSubcommSetTypeGeneral() 139 @*/ 140 PetscErrorCode PetscSubcommSetNumber(PetscSubcomm psubcomm,PetscInt nsubcomm) 141 { 142 MPI_Comm comm=psubcomm->parent; 143 PetscMPIInt msub,size; 144 145 PetscFunctionBegin; 146 PetscCheckFalse(!psubcomm,PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate() first"); 147 CHKERRMPI(MPI_Comm_size(comm,&size)); 148 CHKERRQ(PetscMPIIntCast(nsubcomm,&msub)); 149 PetscCheckFalse(msub < 1 || msub > size,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Num of subcommunicators %d cannot be < 1 or > input comm size %d",msub,size); 150 151 psubcomm->n = msub; 152 PetscFunctionReturn(0); 153 } 154 155 /*@ 156 PetscSubcommSetType - Set type of subcommunicators. 
157 158 Collective 159 160 Input Parameters: 161 + psubcomm - PetscSubcomm context 162 - subcommtype - subcommunicator type, PETSC_SUBCOMM_CONTIGUOUS,PETSC_SUBCOMM_INTERLACED 163 164 Level: advanced 165 166 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetTypeGeneral(), PetscSubcommType 167 @*/ 168 PetscErrorCode PetscSubcommSetType(PetscSubcomm psubcomm,PetscSubcommType subcommtype) 169 { 170 PetscFunctionBegin; 171 PetscCheckFalse(!psubcomm,PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()"); 172 PetscCheckFalse(psubcomm->n < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",psubcomm->n); 173 174 if (subcommtype == PETSC_SUBCOMM_CONTIGUOUS) { 175 CHKERRQ(PetscSubcommCreate_contiguous(psubcomm)); 176 } else if (subcommtype == PETSC_SUBCOMM_INTERLACED) { 177 CHKERRQ(PetscSubcommCreate_interlaced(psubcomm)); 178 } else SETERRQ(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %s is not supported yet",PetscSubcommTypes[subcommtype]); 179 PetscFunctionReturn(0); 180 } 181 182 /*@ 183 PetscSubcommSetTypeGeneral - Set a PetscSubcomm from user's specifications 184 185 Collective 186 187 Input Parameters: 188 + psubcomm - PetscSubcomm context 189 . color - control of subset assignment (nonnegative integer). Processes with the same color are in the same subcommunicator. 
190 - subrank - rank in the subcommunicator 191 192 Level: advanced 193 194 .seealso: PetscSubcommCreate(),PetscSubcommDestroy(),PetscSubcommSetNumber(),PetscSubcommSetType() 195 @*/ 196 PetscErrorCode PetscSubcommSetTypeGeneral(PetscSubcomm psubcomm,PetscMPIInt color,PetscMPIInt subrank) 197 { 198 MPI_Comm subcomm=0,dupcomm=0,comm=psubcomm->parent; 199 PetscMPIInt size,icolor,duprank,*recvbuf,sendbuf[3],mysubsize,rank,*subsize; 200 PetscMPIInt i,nsubcomm=psubcomm->n; 201 202 PetscFunctionBegin; 203 PetscCheckFalse(!psubcomm,PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"PetscSubcomm is not created. Call PetscSubcommCreate()"); 204 PetscCheckFalse(nsubcomm < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subcommunicators %d is incorrect. Call PetscSubcommSetNumber()",nsubcomm); 205 206 CHKERRMPI(MPI_Comm_split(comm,color,subrank,&subcomm)); 207 208 /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */ 209 /* TODO: this can be done in an ostensibly scalale way (i.e., without allocating an array of size 'size') as is done in PetscObjectsCreateGlobalOrdering(). 
*/ 210 CHKERRMPI(MPI_Comm_size(comm,&size)); 211 CHKERRQ(PetscMalloc1(2*size,&recvbuf)); 212 213 CHKERRMPI(MPI_Comm_rank(comm,&rank)); 214 CHKERRMPI(MPI_Comm_size(subcomm,&mysubsize)); 215 216 sendbuf[0] = color; 217 sendbuf[1] = mysubsize; 218 CHKERRMPI(MPI_Allgather(sendbuf,2,MPI_INT,recvbuf,2,MPI_INT,comm)); 219 220 CHKERRQ(PetscCalloc1(nsubcomm,&subsize)); 221 for (i=0; i<2*size; i+=2) { 222 subsize[recvbuf[i]] = recvbuf[i+1]; 223 } 224 CHKERRQ(PetscFree(recvbuf)); 225 226 duprank = 0; 227 for (icolor=0; icolor<nsubcomm; icolor++) { 228 if (icolor != color) { /* not color of this process */ 229 duprank += subsize[icolor]; 230 } else { 231 duprank += subrank; 232 break; 233 } 234 } 235 CHKERRMPI(MPI_Comm_split(comm,0,duprank,&dupcomm)); 236 237 CHKERRQ(PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL)); 238 CHKERRQ(PetscCommDuplicate(subcomm,&psubcomm->child,NULL)); 239 CHKERRMPI(MPI_Comm_free(&dupcomm)); 240 CHKERRMPI(MPI_Comm_free(&subcomm)); 241 242 psubcomm->color = color; 243 psubcomm->subsize = subsize; 244 psubcomm->type = PETSC_SUBCOMM_GENERAL; 245 PetscFunctionReturn(0); 246 } 247 248 /*@ 249 PetscSubcommDestroy - Destroys a PetscSubcomm object 250 251 Collective on PetscSubcomm 252 253 Input Parameter: 254 . psubcomm - the PetscSubcomm context 255 256 Level: advanced 257 258 .seealso: PetscSubcommCreate(),PetscSubcommSetType() 259 @*/ 260 PetscErrorCode PetscSubcommDestroy(PetscSubcomm *psubcomm) 261 { 262 PetscFunctionBegin; 263 if (!*psubcomm) PetscFunctionReturn(0); 264 CHKERRQ(PetscCommDestroy(&(*psubcomm)->dupparent)); 265 CHKERRQ(PetscCommDestroy(&(*psubcomm)->child)); 266 CHKERRQ(PetscFree((*psubcomm)->subsize)); 267 if ((*psubcomm)->subcommprefix) CHKERRQ(PetscFree((*psubcomm)->subcommprefix)); 268 CHKERRQ(PetscFree((*psubcomm))); 269 PetscFunctionReturn(0); 270 } 271 272 /*@ 273 PetscSubcommCreate - Create a PetscSubcomm context. 274 275 Collective 276 277 Input Parameter: 278 . comm - MPI communicator 279 280 Output Parameter: 281 . 
psubcomm - location to store the PetscSubcomm context 282 283 Level: advanced 284 285 .seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(), 286 PetscSubcommSetNumber() 287 @*/ 288 PetscErrorCode PetscSubcommCreate(MPI_Comm comm,PetscSubcomm *psubcomm) 289 { 290 PetscMPIInt rank,size; 291 292 PetscFunctionBegin; 293 CHKERRQ(PetscNew(psubcomm)); 294 295 /* set defaults */ 296 CHKERRMPI(MPI_Comm_rank(comm,&rank)); 297 CHKERRMPI(MPI_Comm_size(comm,&size)); 298 299 (*psubcomm)->parent = comm; 300 (*psubcomm)->dupparent = comm; 301 (*psubcomm)->child = PETSC_COMM_SELF; 302 (*psubcomm)->n = size; 303 (*psubcomm)->color = rank; 304 (*psubcomm)->subsize = NULL; 305 (*psubcomm)->type = PETSC_SUBCOMM_INTERLACED; 306 PetscFunctionReturn(0); 307 } 308 309 /*@C 310 PetscSubcommGetParent - Gets the communicator that was used to create the PetscSubcomm 311 312 Collective 313 314 Input Parameter: 315 . scomm - the PetscSubcomm 316 317 Output Parameter: 318 . pcomm - location to store the parent communicator 319 320 Level: intermediate 321 322 .seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(), 323 PetscSubcommSetNumber(), PetscSubcommGetChild(), PetscSubcommContiguousParent() 324 @*/ 325 PetscErrorCode PetscSubcommGetParent(PetscSubcomm scomm,MPI_Comm *pcomm) 326 { 327 *pcomm = PetscSubcommParent(scomm); 328 return 0; 329 } 330 331 /*@C 332 PetscSubcommGetContiguousParent - Gets a communicator that that is a duplicate of the parent but has the ranks 333 reordered by the order they are in the children 334 335 Collective 336 337 Input Parameter: 338 . scomm - the PetscSubcomm 339 340 Output Parameter: 341 . 
   pcomm - location to store the parent communicator

   Level: intermediate

.seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(),
          PetscSubcommSetNumber(), PetscSubcommGetChild(), PetscSubcommContiguousParent()
@*/
PetscErrorCode PetscSubcommGetContiguousParent(PetscSubcomm scomm,MPI_Comm *pcomm)
{
  *pcomm = PetscSubcommContiguousParent(scomm);
  return 0;
}

/*@C
  PetscSubcommGetChild - Gets the communicator created by the PetscSubcomm

   Collective

   Input Parameter:
.  scomm - the PetscSubcomm

   Output Parameter:
.  ccomm - location to store the child communicator

   Level: intermediate

.seealso: PetscSubcommDestroy(), PetscSubcommSetTypeGeneral(), PetscSubcommSetFromOptions(), PetscSubcommSetType(),
          PetscSubcommSetNumber(), PetscSubcommGetParent(), PetscSubcommContiguousParent()
@*/
PetscErrorCode PetscSubcommGetChild(PetscSubcomm scomm,MPI_Comm *ccomm)
{
  *ccomm = PetscSubcommChild(scomm);
  return 0;
}

/* Splits the parent into nsubcomm subcommunicators of contiguous rank blocks:
   ranks [0..s0-1] get color 0, [s0..s0+s1-1] get color 1, etc. Leftover
   processes (size % nsubcomm) are distributed one each to the lowest colors. */
static PetscErrorCode PetscSubcommCreate_contiguous(PetscSubcomm psubcomm)
{
  PetscMPIInt rank,size,*subsize,duprank=-1,subrank=-1;
  PetscMPIInt np_subcomm,nleftover,i,color=-1,rankstart,nsubcomm=psubcomm->n;
  MPI_Comm    subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(comm,&rank));
  CHKERRMPI(MPI_Comm_size(comm,&size));

  /* get size of each subcommunicator */
  CHKERRQ(PetscMalloc1(1+nsubcomm,&subsize));

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* first nleftover subcomms get one extra process */
  }

  /* get color and subrank of this proc: walk the contiguous blocks until rank falls inside one */
  rankstart = 0;
  for (i=0; i<nsubcomm; i++) {
    if (rank >= rankstart && rank < rankstart+subsize[i]) {
      color   = i;
      subrank = rank - rankstart;
      duprank = rank; /* contiguous layout already matches parent rank order */
      break;
    } else rankstart += subsize[i];
  }

  CHKERRMPI(MPI_Comm_split(comm,color,subrank,&subcomm));

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  CHKERRMPI(MPI_Comm_split(comm,0,duprank,&dupcomm));
  CHKERRQ(PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL));
  CHKERRQ(PetscCommDuplicate(subcomm,&psubcomm->child,NULL));
  CHKERRMPI(MPI_Comm_free(&dupcomm));
  CHKERRMPI(MPI_Comm_free(&subcomm));

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_CONTIGUOUS;
  PetscFunctionReturn(0);
}

/*
   Note:
   In PCREDUNDANT, to avoid data scattering from subcomm back to original comm, we create subcommunicators
   by iteratively taking a process into a subcommunicator.
   Example: size=4, nsubcomm=(*psubcomm)->n=3
     comm=(*psubcomm)->parent:
      rank:     [0]  [1]  [2]  [3]
      color:     0    1    2    0

     subcomm=(*psubcomm)->comm:
      subrank:  [0]  [0]  [0]  [1]

     dupcomm=(*psubcomm)->dupparent:
      duprank:  [0]  [2]  [3]  [1]

     Here, subcomm[color = 0] has subsize=2, owns process [0] and [3]
           subcomm[color = 1] has subsize=1, owns process [1]
           subcomm[color = 2] has subsize=1, owns process [2]
           dupcomm has same number of processes as comm, and its duprank maps
           processes in subcomm contiguously into a 1d array:
            duprank:     [0]   [1]      [2]         [3]
            rank:        [0]   [3]      [1]         [2]
                    subcomm[0] subcomm[1] subcomm[2]
*/

/* Splits the parent into nsubcomm subcommunicators round-robin by rank:
   color = rank % nsubcomm, so consecutive parent ranks land in different
   subcommunicators (see the layout example in the comment above). */
static PetscErrorCode PetscSubcommCreate_interlaced(PetscSubcomm psubcomm)
{
  PetscMPIInt rank,size,*subsize,duprank,subrank;
  PetscMPIInt np_subcomm,nleftover,i,j,color,nsubcomm=psubcomm->n;
  MPI_Comm    subcomm=0,dupcomm=0,comm=psubcomm->parent;

  PetscFunctionBegin;
  CHKERRMPI(MPI_Comm_rank(comm,&rank));
  CHKERRMPI(MPI_Comm_size(comm,&size));

  /* get size of each subcommunicator */
  CHKERRQ(PetscMalloc1(1+nsubcomm,&subsize));

  np_subcomm = size/nsubcomm;
  nleftover  = size - nsubcomm*np_subcomm;
  for (i=0; i<nsubcomm; i++) {
    subsize[i] = np_subcomm;
    if (i<nleftover) subsize[i]++; /* first nleftover subcomms get one extra process */
  }

  /* find color for this proc: round-robin assignment by parent rank */
  color   = rank%nsubcomm;
  subrank = rank/nsubcomm;

  CHKERRMPI(MPI_Comm_split(comm,color,subrank,&subcomm));

  /* duprank = (sum of sizes of subcomms with smaller color) + subrank,
     so dupcomm lays the subcommunicators out contiguously */
  j = 0; duprank = 0;
  for (i=0; i<nsubcomm; i++) {
    if (j == color) {
      duprank += subrank;
      break;
    }
    duprank += subsize[i]; j++;
  }

  /* create dupcomm with same size as comm, but its rank, duprank, maps subcomm's contiguously into dupcomm */
  CHKERRMPI(MPI_Comm_split(comm,0,duprank,&dupcomm));
  CHKERRQ(PetscCommDuplicate(dupcomm,&psubcomm->dupparent,NULL));
  CHKERRQ(PetscCommDuplicate(subcomm,&psubcomm->child,NULL));
  CHKERRMPI(MPI_Comm_free(&dupcomm));
  CHKERRMPI(MPI_Comm_free(&subcomm));

  psubcomm->color   = color;
  psubcomm->subsize = subsize;
  psubcomm->type    = PETSC_SUBCOMM_INTERLACED;
  PetscFunctionReturn(0);
}