#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>     /*I "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank;               /* owner */
  PetscInt    N;                  /* total number of dofs */
  PetscInt    n;                  /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Redundant"
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    /* Combine each rank's local copy onto the owning rank's global values; the owner folds in its current global entries first */
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    /* Owner seeds its local vector from the global vector, then broadcasts to all ranks */
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Redundant"
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Redundant"
static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring_Redundant"
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine_Redundant"
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_Redundant"
static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation_Redundant"
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
   DMRedundantSetSize - Sets the size of a densely coupled redundant object

   Collective on DM

   Input Parameters:
+  dm - redundant DM
.  rank - rank of process to own redundant degrees of freedom
-  N - total number of redundant degrees of freedom

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
   DMRedundantGetSize - Gets the size of a densely coupled redundant object

   Not Collective

   Input Parameter:
.  dm - redundant DM

   Output Parameters:
+  rank - rank of process to own redundant degrees of freedom (or NULL)
-  N - total number of redundant degrees of freedom (or NULL)

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
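
/*
   Usage sketch (illustrative only, not part of the library): building a DMREDUNDANT "by hand"
   with DMCreate()/DMSetType() and then setting and querying its size with the functions above.
   The communicator, the owning rank 0, and the size 3 are assumptions made up for this sketch.

     DM          dm;
     PetscMPIInt owner;
     PetscInt    N;

     ierr = DMCreate(PETSC_COMM_WORLD,&dm);CHKERRQ(ierr);
     ierr = DMSetType(dm,DMREDUNDANT);CHKERRQ(ierr);
     ierr = DMRedundantSetSize(dm,0,3);CHKERRQ(ierr);
     ierr = DMSetUp(dm);CHKERRQ(ierr);
     ierr = DMRedundantGetSize(dm,&owner,&N);CHKERRQ(ierr);
     ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/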

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize_Redundant"
static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize_Redundant"
static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N) *N = red->N;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
   In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
   no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
   process's local computations).

   This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

   Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
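
/*
   Layout sketch (illustrative assumption, not library code): with N = 4 redundant dofs owned by
   rank 1 on a 3-rank communicator, the global vector has local sizes {0,4,0} while every rank's
   local vector has size 4.  DMGlobalToLocalBegin/End() broadcast the owner's values to all ranks,
   and DMLocalToGlobalBegin/End() with ADD_VALUES accumulate the local copies onto the owner's
   global values.

     Vec gvec,lvec;

     ierr = DMCreateGlobalVector(dm,&gvec);CHKERRQ(ierr);
     ierr = DMCreateLocalVector(dm,&lvec);CHKERRQ(ierr);
     ierr = DMGlobalToLocalBegin(dm,gvec,INSERT_VALUES,lvec);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(dm,gvec,INSERT_VALUES,lvec);CHKERRQ(ierr);
     ierr = DMLocalToGlobalBegin(dm,lvec,ADD_VALUES,gvec);CHKERRQ(ierr);
     ierr = DMLocalToGlobalEnd(dm,lvec,ADD_VALUES,gvec);CHKERRQ(ierr);
     ierr = VecDestroy(&lvec);CHKERRQ(ierr);
     ierr = VecDestroy(&gvec);CHKERRQ(ierr);
*/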

#undef __FUNCT__
#define __FUNCT__ "DMRedundantCreate"
/*@C
   DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

   Collective on MPI_Comm

   Input Parameters:
+  comm - the processors that will share the global vector
.  rank - rank to own the redundant values
-  N - total number of degrees of freedom

   Output Parameter:
.  dm - the redundant DM

   Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
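
/*
   Usage sketch (illustrative only): the manpages above note that a DMREDUNDANT is typically placed
   inside a DMCOMPOSITE, e.g. to hold a few globally coupled parameters next to a field DM.  The
   owning rank 0, the size 2, and the variable names are assumptions made up for this sketch;
   "fielddm" stands for some other, previously created DM.

     DM pack,reddm;

     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,2,&reddm);CHKERRQ(ierr);
     ierr = DMCompositeCreate(PETSC_COMM_WORLD,&pack);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,fielddm);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,reddm);CHKERRQ(ierr);
     ierr = DMDestroy(&reddm);CHKERRQ(ierr);
*/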