/* Actual source code: telescope_dmda.c */


  2: #include <petsc/private/matimpl.h>
  3: #include <petsc/private/pcimpl.h>
  4: #include <petsc/private/dmimpl.h>
  5: #include <petscksp.h>
  6: #include <petscdm.h>
  7: #include <petscdmda.h>

  9: #include "../src/ksp/pc/impls/telescope/telescope.h"

/* BibTeX entry registered once (guarded by 'cited') via PetscCitationsRegister()
   the first time PCApply_Telescope_dmda() is invoked. */
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
"  title     = {Extreme-Scale Multigrid Components within PETSc},\n"
"  author    = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
"  booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
"  series    = {PASC '16},\n"
"  isbn      = {978-1-4503-4126-4},\n"
"  location  = {Lausanne, Switzerland},\n"
"  pages     = {5:1--5:12},\n"
"  articleno = {5},\n"
"  numpages  = {12},\n"
"  url       = {https://doi.acm.org/10.1145/2929908.2929913},\n"
"  doi       = {10.1145/2929908.2929913},\n"
"  acmid     = {2929913},\n"
"  publisher = {ACM},\n"
"  address   = {New York, NY, USA},\n"
"  keywords  = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
"  year      = {2016}\n"
"}\n";

 32: static PetscErrorCode _DMDADetermineRankFromGlobalIJK(PetscInt dim,PetscInt i,PetscInt j,PetscInt k,
 33:                                                       PetscInt Mp,PetscInt Np,PetscInt Pp,
 34:                                                       PetscInt start_i[],PetscInt start_j[],PetscInt start_k[],
 35:                                                       PetscInt span_i[],PetscInt span_j[],PetscInt span_k[],
 36:                                                       PetscMPIInt *_pi,PetscMPIInt *_pj,PetscMPIInt *_pk,PetscMPIInt *rank_re)
 37: {
 38:   PetscInt pi,pj,pk,n;

 40:   *rank_re = -1;
 41:   if (_pi) *_pi = -1;
 42:   if (_pj) *_pj = -1;
 43:   if (_pk) *_pk = -1;
 44:   pi = pj = pk = -1;
 45:   if (_pi) {
 46:     for (n=0; n<Mp; n++) {
 47:       if ((i >= start_i[n]) && (i < start_i[n]+span_i[n])) {
 48:         pi = n;
 49:         break;
 50:       }
 51:     }
 53:     *_pi = pi;
 54:   }

 56:   if (_pj) {
 57:     for (n=0; n<Np; n++) {
 58:       if ((j >= start_j[n]) && (j < start_j[n]+span_j[n])) {
 59:         pj = n;
 60:         break;
 61:       }
 62:     }
 64:     *_pj = pj;
 65:   }

 67:   if (_pk) {
 68:     for (n=0; n<Pp; n++) {
 69:       if ((k >= start_k[n]) && (k < start_k[n]+span_k[n])) {
 70:         pk = n;
 71:         break;
 72:       }
 73:     }
 75:     *_pk = pk;
 76:   }

 78:   switch (dim) {
 79:   case 1:
 80:     *rank_re = pi;
 81:     break;
 82:   case 2:
 83:     *rank_re = pi + pj * Mp;
 84:     break;
 85:   case 3:
 86:     *rank_re = pi + pj * Mp + pk * (Mp*Np);
 87:     break;
 88:   }
 89:   return 0;
 90: }

 92: static PetscErrorCode _DMDADetermineGlobalS0(PetscInt dim,PetscMPIInt rank_re,PetscInt Mp_re,PetscInt Np_re,PetscInt Pp_re,
 93:                                       PetscInt range_i_re[],PetscInt range_j_re[],PetscInt range_k_re[],PetscInt *s0)
 94: {
 95:   PetscInt i,j,k,start_IJK = 0;
 96:   PetscInt rank_ijk;

 98:   switch (dim) {
 99:   case 1:
100:     for (i=0; i<Mp_re; i++) {
101:       rank_ijk = i;
102:       if (rank_ijk < rank_re) {
103:         start_IJK += range_i_re[i];
104:       }
105:     }
106:     break;
107:     case 2:
108:     for (j=0; j<Np_re; j++) {
109:       for (i=0; i<Mp_re; i++) {
110:         rank_ijk = i + j*Mp_re;
111:         if (rank_ijk < rank_re) {
112:           start_IJK += range_i_re[i]*range_j_re[j];
113:         }
114:       }
115:     }
116:     break;
117:     case 3:
118:     for (k=0; k<Pp_re; k++) {
119:       for (j=0; j<Np_re; j++) {
120:         for (i=0; i<Mp_re; i++) {
121:           rank_ijk = i + j*Mp_re + k*Mp_re*Np_re;
122:           if (rank_ijk < rank_re) {
123:             start_IJK += range_i_re[i]*range_j_re[j]*range_k_re[k];
124:           }
125:         }
126:       }
127:     }
128:     break;
129:   }
130:   *s0 = start_IJK;
131:   return 0;
132: }

/*
 Scatter the coordinates of the parent (fine) 2d DMDA into the coordinate
 vector of the repartitioned sub-DMDA. Every rank of the parent communicator
 participates in the scatter; ranks outside the sub-communicator contribute
 empty index sets. Returns immediately when the parent DM has no coordinates.
*/
static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors2d(PC_Telescope sred,DM dm,DM subdm)
{
  DM             cdm;
  Vec            coor,coor_natural,perm_coors;
  PetscInt       i,j,si,sj,ni,nj,M,N,Ml,Nl,c,nidx;
  PetscInt       *fine_indices;
  IS             is_fine,is_local;
  VecScatter     sctx;

  DMGetCoordinates(dm,&coor);
  if (!coor) return(0);
  if (PCTelescope_isActiveRank(sred)) {
    /* establish a coordinate vector on the sub-DMDA; values are overwritten below */
    DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
  }
  /* Get the coordinate vector from the distributed array */
  DMGetCoordinateDM(dm,&cdm);
  DMDACreateNaturalVector(cdm,&coor_natural);

  /* convert the parent coordinates into PETSc's natural (i-fastest) ordering */
  DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
  DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

  /* get indices of the guys I want to grab */
  DMDAGetInfo(dm,NULL,&M,&N,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  if (PCTelescope_isActiveRank(sred)) {
    DMDAGetCorners(subdm,&si,&sj,NULL,&ni,&nj,NULL);
    Ml = ni;
    Nl = nj;
  } else {
    /* inactive ranks own no portion of the sub-DMDA */
    si = sj = 0;
    ni = nj = 0;
    Ml = Nl = 0;
  }

  PetscMalloc1(Ml*Nl*2,&fine_indices);
  c = 0;
  if (PCTelescope_isActiveRank(sred)) {
    for (j=sj; j<sj+nj; j++) {
      for (i=si; i<si+ni; i++) {
        nidx = (i) + (j)*M;               /* natural index of grid point (i,j) */
        fine_indices[c  ] = 2 * nidx     ; /* interleaved x coordinate */
        fine_indices[c+1] = 2 * nidx + 1 ; /* interleaved y coordinate */
        c = c + 2;
      }
    }
  }

  /* generate scatter */
  ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*2,fine_indices,PETSC_USE_POINTER,&is_fine);
  ISCreateStride(PETSC_COMM_SELF,Ml*Nl*2,0,1,&is_local);

  /* scatter */
  VecCreate(PETSC_COMM_SELF,&perm_coors);
  VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*2);
  VecSetType(perm_coors,VECSEQ);

  VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
  VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
  /* access: copy the gathered coordinates into the sub-DMDA coordinate vector */
  if (PCTelescope_isActiveRank(sred)) {
    Vec               _coors;
    const PetscScalar *LA_perm;
    PetscScalar       *LA_coors;

    DMGetCoordinates(subdm,&_coors);
    VecGetArrayRead(perm_coors,&LA_perm);
    VecGetArray(_coors,&LA_coors);
    for (i=0; i<Ml*Nl*2; i++) {
      LA_coors[i] = LA_perm[i];
    }
    VecRestoreArray(_coors,&LA_coors);
    VecRestoreArrayRead(perm_coors,&LA_perm);
  }

  /* update local coords: push the new global coordinates into the local (ghosted) vector */
  if (PCTelescope_isActiveRank(sred)) {
    DM  _dmc;
    Vec _coors,_coors_local;
    DMGetCoordinateDM(subdm,&_dmc);
    DMGetCoordinates(subdm,&_coors);
    DMGetCoordinatesLocal(subdm,&_coors_local);
    DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
    DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
  }
  VecScatterDestroy(&sctx);
  ISDestroy(&is_fine);
  PetscFree(fine_indices);
  ISDestroy(&is_local);
  VecDestroy(&perm_coors);
  VecDestroy(&coor_natural);
  return 0;
}

/*
 3d analogue of PCTelescopeSetUp_dmda_repart_coors2d(): scatter the parent
 DMDA coordinates into the repartitioned sub-DMDA coordinate vector. All
 ranks participate in the scatter; inactive ranks use empty index sets.
 Returns immediately when the parent DM has no coordinates.
*/
static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors3d(PC_Telescope sred,DM dm,DM subdm)
{
  DM             cdm;
  Vec            coor,coor_natural,perm_coors;
  PetscInt       i,j,k,si,sj,sk,ni,nj,nk,M,N,P,Ml,Nl,Pl,c,nidx;
  PetscInt       *fine_indices;
  IS             is_fine,is_local;
  VecScatter     sctx;

  DMGetCoordinates(dm,&coor);
  if (!coor) return 0;

  if (PCTelescope_isActiveRank(sred)) {
    /* establish a coordinate vector on the sub-DMDA; values are overwritten below */
    DMDASetUniformCoordinates(subdm,0.0,1.0,0.0,1.0,0.0,1.0);
  }

  /* Get the coordinate vector from the distributed array */
  DMGetCoordinateDM(dm,&cdm);
  DMDACreateNaturalVector(cdm,&coor_natural);
  DMDAGlobalToNaturalBegin(cdm,coor,INSERT_VALUES,coor_natural);
  DMDAGlobalToNaturalEnd(cdm,coor,INSERT_VALUES,coor_natural);

  /* get indices of the guys I want to grab */
  DMDAGetInfo(dm,NULL,&M,&N,&P,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

  if (PCTelescope_isActiveRank(sred)) {
    DMDAGetCorners(subdm,&si,&sj,&sk,&ni,&nj,&nk);
    Ml = ni;
    Nl = nj;
    Pl = nk;
  } else {
    /* inactive ranks own no portion of the sub-DMDA */
    si = sj = sk = 0;
    ni = nj = nk = 0;
    Ml = Nl = Pl = 0;
  }

  PetscMalloc1(Ml*Nl*Pl*3,&fine_indices);

  c = 0;
  if (PCTelescope_isActiveRank(sred)) {
    for (k=sk; k<sk+nk; k++) {
      for (j=sj; j<sj+nj; j++) {
        for (i=si; i<si+ni; i++) {
          nidx = (i) + (j)*M + (k)*M*N;      /* natural index of grid point (i,j,k) */
          fine_indices[c  ] = 3 * nidx     ;  /* interleaved x coordinate */
          fine_indices[c+1] = 3 * nidx + 1 ;  /* interleaved y coordinate */
          fine_indices[c+2] = 3 * nidx + 2 ;  /* interleaved z coordinate */
          c = c + 3;
        }
      }
    }
  }

  /* generate scatter */
  ISCreateGeneral(PetscObjectComm((PetscObject)dm),Ml*Nl*Pl*3,fine_indices,PETSC_USE_POINTER,&is_fine);
  ISCreateStride(PETSC_COMM_SELF,Ml*Nl*Pl*3,0,1,&is_local);

  /* scatter */
  VecCreate(PETSC_COMM_SELF,&perm_coors);
  VecSetSizes(perm_coors,PETSC_DECIDE,Ml*Nl*Pl*3);
  VecSetType(perm_coors,VECSEQ);
  VecScatterCreate(coor_natural,is_fine,perm_coors,is_local,&sctx);
  VecScatterBegin(sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(  sctx,coor_natural,perm_coors,INSERT_VALUES,SCATTER_FORWARD);

  /* access: copy the gathered coordinates into the sub-DMDA coordinate vector */
  if (PCTelescope_isActiveRank(sred)) {
    Vec               _coors;
    const PetscScalar *LA_perm;
    PetscScalar       *LA_coors;

    DMGetCoordinates(subdm,&_coors);
    VecGetArrayRead(perm_coors,&LA_perm);
    VecGetArray(_coors,&LA_coors);
    for (i=0; i<Ml*Nl*Pl*3; i++) {
      LA_coors[i] = LA_perm[i];
    }
    VecRestoreArray(_coors,&LA_coors);
    VecRestoreArrayRead(perm_coors,&LA_perm);
  }

  /* update local coords: push the new global coordinates into the local (ghosted) vector */
  if (PCTelescope_isActiveRank(sred)) {
    DM  _dmc;
    Vec _coors,_coors_local;

    DMGetCoordinateDM(subdm,&_dmc);
    DMGetCoordinates(subdm,&_coors);
    DMGetCoordinatesLocal(subdm,&_coors_local);
    DMGlobalToLocalBegin(_dmc,_coors,INSERT_VALUES,_coors_local);
    DMGlobalToLocalEnd(_dmc,_coors,INSERT_VALUES,_coors_local);
  }

  VecScatterDestroy(&sctx);
  ISDestroy(&is_fine);
  PetscFree(fine_indices);
  ISDestroy(&is_local);
  VecDestroy(&perm_coors);
  VecDestroy(&coor_natural);
  return 0;
}

/*
 Dispatch the coordinate repartitioning by DMDA dimension. 1d repartitioning
 is not supported (hard error); 2d and 3d call the corresponding helper.
 A no-op when the parent DM has no coordinates attached.
*/
static PetscErrorCode PCTelescopeSetUp_dmda_repart_coors(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  PetscInt       dim;
  DM             dm,subdm;
  PetscSubcomm   psubcomm;
  MPI_Comm       comm;
  Vec            coor;

  PCGetDM(pc,&dm);
  DMGetCoordinates(dm,&coor);
  if (!coor) return 0;
  psubcomm = sred->psubcomm;
  comm = PetscSubcommParent(psubcomm);
  subdm = ctx->dmrepart;

  PetscInfo(pc,"PCTelescope: setting up the coordinates (DMDA)\n");
  DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
  switch (dim) {
  case 1:
    SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
  case 2: PCTelescopeSetUp_dmda_repart_coors2d(sred,dm,subdm);
    break;
  case 3: PCTelescopeSetUp_dmda_repart_coors3d(sred,dm,subdm);
    break;
  }
  return 0;
}

358: /* setup repartitioned dm */
/*
 setup repartitioned dm

 Create the repartitioned DMDA (ctx->dmrepart) on the child communicator,
 mirroring the parent DMDA's sizes, dof, stencil, boundary types, refinement
 factors and interpolation type. The ownership ranges of the new DMDA are
 then broadcast from rank 0 to every rank of the parent communicator (note:
 rank 0 is assumed to always participate in the sub-communicator), and the
 cumulative start offsets are derived from them. Finally the repartitioned
 DM is attached to the sub-KSP, inheriting the user's KSPComputeOperators
 callback when one is present and not explicitly ignored.
*/
PetscErrorCode PCTelescopeSetUp_dmda_repart(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  DM                    dm;
  PetscInt              dim,nx,ny,nz,ndof,nsw,sum,k;
  DMBoundaryType        bx,by,bz;
  DMDAStencilType       stencil;
  const PetscInt        *_range_i_re;
  const PetscInt        *_range_j_re;
  const PetscInt        *_range_k_re;
  DMDAInterpolationType itype;
  PetscInt              refine_x,refine_y,refine_z;
  MPI_Comm              comm,subcomm;
  const char            *prefix;

  comm = PetscSubcommParent(sred->psubcomm);
  subcomm = PetscSubcommChild(sred->psubcomm);
  PCGetDM(pc,&dm);

  DMDAGetInfo(dm,&dim,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,&nsw,&bx,&by,&bz,&stencil);
  DMDAGetInterpolationType(dm,&itype);
  DMDAGetRefinementFactor(dm,&refine_x,&refine_y,&refine_z);

  ctx->dmrepart = NULL;
  _range_i_re = _range_j_re = _range_k_re = NULL;
  /* Create DMDA on the child communicator */
  if (PCTelescope_isActiveRank(sred)) {
    /* normalize the unused directions so a single DMDACreate() path can be used below */
    switch (dim) {
    case 1:
      PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (1D)\n");
      /* DMDACreate1d(subcomm,bx,nx,ndof,nsw,NULL,&ctx->dmrepart); */
      ny = nz = 1;
      by = bz = DM_BOUNDARY_NONE;
      break;
    case 2:
      PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (2D)\n");
      /* PetscCall(DMDACreate2d(subcomm,bx,by,stencil,nx,ny, PETSC_DECIDE,PETSC_DECIDE,
         ndof,nsw, NULL,NULL,&ctx->dmrepart)); */
      nz = 1;
      bz = DM_BOUNDARY_NONE;
      break;
    case 3:
      PetscInfo(pc,"PCTelescope: setting up the DMDA on comm subset (3D)\n");
      /* PetscCall(DMDACreate3d(subcomm,bx,by,bz,stencil,nx,ny,nz,
         PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE, ndof,nsw, NULL,NULL,NULL,&ctx->dmrepart)); */
      break;
    }
    /*
     The API DMDACreate1d(), DMDACreate2d(), DMDACreate3d() does not allow us to set/append
     a unique option prefix for the DM, thus I prefer to expose the contents of these API's here.
     This allows users to control the partitioning of the subDM.
    */
    DMDACreate(subcomm,&ctx->dmrepart);
    /* Set unique option prefix name */
    KSPGetOptionsPrefix(sred->ksp,&prefix);
    DMSetOptionsPrefix(ctx->dmrepart,prefix);
    DMAppendOptionsPrefix(ctx->dmrepart,"repart_");
    /* standard setup from DMDACreate{1,2,3}d() */
    DMSetDimension(ctx->dmrepart,dim);
    DMDASetSizes(ctx->dmrepart,nx,ny,nz);
    DMDASetNumProcs(ctx->dmrepart,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE);
    DMDASetBoundaryType(ctx->dmrepart,bx,by,bz);
    DMDASetDof(ctx->dmrepart,ndof);
    DMDASetStencilType(ctx->dmrepart,stencil);
    DMDASetStencilWidth(ctx->dmrepart,nsw);
    DMDASetOwnershipRanges(ctx->dmrepart,NULL,NULL,NULL);
    DMSetFromOptions(ctx->dmrepart);
    DMSetUp(ctx->dmrepart);
    /* Set refinement factors and interpolation type from the parent */
    DMDASetRefinementFactor(ctx->dmrepart,refine_x,refine_y,refine_z);
    DMDASetInterpolationType(ctx->dmrepart,itype);

    DMDAGetInfo(ctx->dmrepart,NULL,NULL,NULL,NULL,&ctx->Mp_re,&ctx->Np_re,&ctx->Pp_re,NULL,NULL,NULL,NULL,NULL,NULL);
    DMDAGetOwnershipRanges(ctx->dmrepart,&_range_i_re,&_range_j_re,&_range_k_re);

    /* inherit the parent's matrix-creation and domain-decomposition implementations */
    ctx->dmrepart->ops->creatematrix = dm->ops->creatematrix;
    ctx->dmrepart->ops->createdomaindecomposition = dm->ops->createdomaindecomposition;
  }

  /* generate ranges for repartitioned dm */
  /* note - assume rank 0 always participates */
  /* TODO: use a single MPI call */
  MPI_Bcast(&ctx->Mp_re,1,MPIU_INT,0,comm);
  MPI_Bcast(&ctx->Np_re,1,MPIU_INT,0,comm);
  MPI_Bcast(&ctx->Pp_re,1,MPIU_INT,0,comm);

  PetscCalloc3(ctx->Mp_re,&ctx->range_i_re,ctx->Np_re,&ctx->range_j_re,ctx->Pp_re,&ctx->range_k_re);

  /* only active ranks have the ownership ranges; copy before broadcasting */
  if (_range_i_re) PetscArraycpy(ctx->range_i_re,_range_i_re,ctx->Mp_re);
  if (_range_j_re) PetscArraycpy(ctx->range_j_re,_range_j_re,ctx->Np_re);
  if (_range_k_re) PetscArraycpy(ctx->range_k_re,_range_k_re,ctx->Pp_re);

  /* TODO: use a single MPI call */
  MPI_Bcast(ctx->range_i_re,ctx->Mp_re,MPIU_INT,0,comm);
  MPI_Bcast(ctx->range_j_re,ctx->Np_re,MPIU_INT,0,comm);
  MPI_Bcast(ctx->range_k_re,ctx->Pp_re,MPIU_INT,0,comm);

  PetscMalloc3(ctx->Mp_re,&ctx->start_i_re,ctx->Np_re,&ctx->start_j_re,ctx->Pp_re,&ctx->start_k_re);

  /* prefix-sum the ownership counts into per-direction start offsets */
  sum = 0;
  for (k=0; k<ctx->Mp_re; k++) {
    ctx->start_i_re[k] = sum;
    sum += ctx->range_i_re[k];
  }

  sum = 0;
  for (k=0; k<ctx->Np_re; k++) {
    ctx->start_j_re[k] = sum;
    sum += ctx->range_j_re[k];
  }

  sum = 0;
  for (k=0; k<ctx->Pp_re; k++) {
    ctx->start_k_re[k] = sum;
    sum += ctx->range_k_re[k];
  }

  /* attach repartitioned dm to child ksp */
  {
    PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
    void           *dmksp_ctx;

    DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);

    /* attach dm to ksp on sub communicator */
    if (PCTelescope_isActiveRank(sred)) {
      KSPSetDM(sred->ksp,ctx->dmrepart);

      if (!dmksp_func || sred->ignore_kspcomputeoperators) {
        KSPSetDMActive(sred->ksp,PETSC_FALSE);
      } else {
        /* sub ksp inherits dmksp_func and context provided by user */
        KSPSetComputeOperators(sred->ksp,dmksp_func,dmksp_ctx);
        KSPSetDMActive(sred->ksp,PETSC_TRUE);
      }
    }
  }
  return 0;
}

/*
 Build the scalar permutation matrix (ctx->permutation) mapping the parent
 3d DMDA ordering onto the ordering of the repartitioned DMDA. Each local
 grid point contributes exactly one unit entry per row; the scalar matrix is
 then expanded by the dof block size via MatCreateMAIJ().
*/
PetscErrorCode PCTelescopeSetUp_dmda_permutation_3d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  DM             dm;
  MPI_Comm       comm;
  Mat            Pscalar,P;
  PetscInt       ndof;
  PetscInt       i,j,k,location,startI[3],endI[3],lenI[3],nx,ny,nz;
  PetscInt       sr,er,Mr;
  Vec            V;

  PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-3D)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);

  PCGetDM(pc,&dm);
  DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);

  /* use a temporary global vector only to obtain sizes/ownership of the dof-blocked layout */
  DMGetGlobalVector(dm,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dm,&V);
  /* reduce to scalar (per-grid-point) sizes; expanded again by MatCreateMAIJ below */
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  /* exactly one nonzero per row (a permutation) */
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],&lenI[2]);
  DMDAGetCorners(dm,&startI[0],&startI[1],&startI[2],&endI[0],&endI[1],&endI[2]);
  /* DMDAGetCorners() returns widths; convert to exclusive end indices */
  endI[0] += startI[0];
  endI[1] += startI[1];
  endI[2] += startI[2];

  for (k=startI[2]; k<endI[2]; k++) {
    for (j=startI[1]; j<endI[1]; j++) {
      for (i=startI[0]; i<endI[0]; i++) {
        PetscMPIInt rank_ijk_re,rank_reI[3];
        PetscInt    s0_re;
        PetscInt    ii,jj,kk,local_ijk_re,mapped_ijk;
        PetscInt    lenI_re[3];

        /* row: local offset of (i,j,k) within this rank's parent sub-domain */
        location = (i - startI[0]) + (j - startI[1])*lenI[0] + (k - startI[2])*lenI[0]*lenI[1];
        /* locate the owner of (i,j,k) on the repartitioned DMDA */
        _DMDADetermineRankFromGlobalIJK(3,i,j,k,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
                                               ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
                                               ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
                                               &rank_reI[0],&rank_reI[1],&rank_reI[2],&rank_ijk_re);
        /* global offset of that owner's first entry */
        _DMDADetermineGlobalS0(3,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);
        /* local coordinates of (i,j,k) within the owner's sub-domain */
        ii = i - ctx->start_i_re[ rank_reI[0] ];
        jj = j - ctx->start_j_re[ rank_reI[1] ];
        kk = k - ctx->start_k_re[ rank_reI[2] ];
        lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
        lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
        lenI_re[2] = ctx->range_k_re[ rank_reI[2] ];
        local_ijk_re = ii + jj * lenI_re[0] + kk * lenI_re[0] * lenI_re[1];
        mapped_ijk = s0_re + local_ijk_re;
        MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
  /* expand the scalar permutation to ndof interleaved components */
  MatCreateMAIJ(Pscalar,ndof,&P);
  MatDestroy(&Pscalar);
  ctx->permutation = P;
  return 0;
}

/*
 2d analogue of PCTelescopeSetUp_dmda_permutation_3d(): build the scalar
 permutation matrix (ctx->permutation) mapping the parent DMDA ordering onto
 the repartitioned DMDA ordering, expanded by dof via MatCreateMAIJ().
*/
PetscErrorCode PCTelescopeSetUp_dmda_permutation_2d(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  DM             dm;
  MPI_Comm       comm;
  Mat            Pscalar,P;
  PetscInt       ndof;
  PetscInt       i,j,location,startI[2],endI[2],lenI[2],nx,ny,nz;
  PetscInt       sr,er,Mr;
  Vec            V;

  PetscInfo(pc,"PCTelescope: setting up the permutation matrix (DMDA-2D)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetDM(pc,&dm);
  DMDAGetInfo(dm,NULL,&nx,&ny,&nz,NULL,NULL,NULL,&ndof,NULL,NULL,NULL,NULL,NULL);
  /* use a temporary global vector only to obtain sizes/ownership of the dof-blocked layout */
  DMGetGlobalVector(dm,&V);
  VecGetSize(V,&Mr);
  VecGetOwnershipRange(V,&sr,&er);
  DMRestoreGlobalVector(dm,&V);
  /* reduce to scalar (per-grid-point) sizes; expanded again by MatCreateMAIJ below */
  sr = sr / ndof;
  er = er / ndof;
  Mr = Mr / ndof;

  MatCreate(comm,&Pscalar);
  MatSetSizes(Pscalar,(er-sr),(er-sr),Mr,Mr);
  MatSetType(Pscalar,MATAIJ);
  /* exactly one nonzero per row (a permutation) */
  MatSeqAIJSetPreallocation(Pscalar,1,NULL);
  MatMPIAIJSetPreallocation(Pscalar,1,NULL,1,NULL);

  DMDAGetCorners(dm,NULL,NULL,NULL,&lenI[0],&lenI[1],NULL);
  DMDAGetCorners(dm,&startI[0],&startI[1],NULL,&endI[0],&endI[1],NULL);
  /* DMDAGetCorners() returns widths; convert to exclusive end indices */
  endI[0] += startI[0];
  endI[1] += startI[1];

  for (j=startI[1]; j<endI[1]; j++) {
    for (i=startI[0]; i<endI[0]; i++) {
      PetscMPIInt rank_ijk_re,rank_reI[3];
      PetscInt    s0_re;
      PetscInt    ii,jj,local_ijk_re,mapped_ijk;
      PetscInt    lenI_re[3];

      /* row: local offset of (i,j) within this rank's parent sub-domain */
      location = (i - startI[0]) + (j - startI[1])*lenI[0];
      /* locate the owner of (i,j) on the repartitioned DMDA */
      _DMDADetermineRankFromGlobalIJK(2,i,j,0,   ctx->Mp_re,ctx->Np_re,ctx->Pp_re,
                                             ctx->start_i_re,ctx->start_j_re,ctx->start_k_re,
                                             ctx->range_i_re,ctx->range_j_re,ctx->range_k_re,
                                             &rank_reI[0],&rank_reI[1],NULL,&rank_ijk_re);

      /* global offset of that owner's first entry */
      _DMDADetermineGlobalS0(2,rank_ijk_re, ctx->Mp_re,ctx->Np_re,ctx->Pp_re, ctx->range_i_re,ctx->range_j_re,ctx->range_k_re, &s0_re);

      /* local coordinates of (i,j) within the owner's sub-domain */
      ii = i - ctx->start_i_re[ rank_reI[0] ];
      jj = j - ctx->start_j_re[ rank_reI[1] ];

      lenI_re[0] = ctx->range_i_re[ rank_reI[0] ];
      lenI_re[1] = ctx->range_j_re[ rank_reI[1] ];
      local_ijk_re = ii + jj * lenI_re[0];
      mapped_ijk = s0_re + local_ijk_re;
      MatSetValue(Pscalar,sr+location,mapped_ijk,1.0,INSERT_VALUES);
    }
  }
  MatAssemblyBegin(Pscalar,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(Pscalar,MAT_FINAL_ASSEMBLY);
  /* expand the scalar permutation to ndof interleaved components */
  MatCreateMAIJ(Pscalar,ndof,&P);
  MatDestroy(&Pscalar);
  ctx->permutation = P;
  return 0;
}

/*
 Create the scatter objects used by PCApply_Telescope_dmda(): the reduced
 vectors xred/yred (global vectors of the repartitioned DM, NULL on inactive
 ranks), the index set isin selecting this rank's portion, the scatter from
 the parent layout into the local work vector xtmp, and the permuted work
 vector ctx->xp on the parent communicator.
*/
PetscErrorCode PCTelescopeSetUp_dmda_scatters(PC pc,PC_Telescope sred,PC_Telescope_DMDACtx *ctx)
{
  Vec            xred,yred,xtmp,x,xp;
  VecScatter     scatter;
  IS             isin;
  Mat            B;
  PetscInt       m,bs,st,ed;
  MPI_Comm       comm;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetOperators(pc,NULL,&B);
  MatCreateVecs(B,&x,NULL);
  MatGetBlockSize(B,&bs);
  VecDuplicate(x,&xp);
  m = 0;
  xred = NULL;
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    /* active ranks pull their ownership range of the repartitioned vector */
    DMCreateGlobalVector(ctx->dmrepart,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    /* inactive ranks contribute an empty stride so the scatter is collective */
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,((PetscObject)x)->type_name);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);
  sred->xred    = xred;
  sred->yred    = yred;
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xtmp    = xtmp;

  ctx->xp       = xp;
  VecDestroy(&x);
  return 0;
}

/*
 Top-level DMDA setup for PCTELESCOPE: allocate the DMDA context, create the
 repartitioned DM, repartition coordinates, build the dimension-specific
 permutation matrix (1d is unsupported), and create the scatters.
*/
PetscErrorCode PCTelescopeSetUp_dmda(PC pc,PC_Telescope sred)
{
  PC_Telescope_DMDACtx *ctx;
  PetscInt             dim;
  DM                   dm;
  MPI_Comm             comm;

  PetscInfo(pc,"PCTelescope: setup (DMDA)\n");
  PetscNew(&ctx);
  sred->dm_ctx = (void*)ctx;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetDM(pc,&dm);
  DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);

  PCTelescopeSetUp_dmda_repart(pc,sred,ctx);
  PCTelescopeSetUp_dmda_repart_coors(pc,sred,ctx);
  switch (dim) {
  case 1:
    SETERRQ(comm,PETSC_ERR_SUP,"Telescope: DMDA (1D) repartitioning not provided");
  case 2: PCTelescopeSetUp_dmda_permutation_2d(pc,sred,ctx);
    break;
  case 3: PCTelescopeSetUp_dmda_permutation_3d(pc,sred,ctx);
    break;
  }
  PCTelescopeSetUp_dmda_scatters(pc,sred,ctx);
  return 0;
}

/*
 Form the redundant operator explicitly when no KSPComputeOperators callback
 is available (or it is ignored): permute the parent operator with the
 telescope permutation (B' = P^T B P), extract this rank's rows, and
 concatenate the sequential pieces into a parallel matrix on the
 sub-communicator. *A is NULL on inactive ranks.
*/
PetscErrorCode PCTelescopeMatCreate_dmda_dmactivefalse(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  PC_Telescope_DMDACtx *ctx;
  MPI_Comm             comm,subcomm;
  Mat                  Bperm,Bred,B,P;
  PetscInt             nr,nc;
  IS                   isrow,iscol;
  Mat                  Blocal,*_Blocal;

  PetscInfo(pc,"PCTelescope: updating the redundant preconditioned operator (DMDA)\n");
  PetscObjectGetComm((PetscObject)pc,&comm);
  subcomm = PetscSubcommChild(sred->psubcomm);
  ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;

  PCGetOperators(pc,NULL,&B);
  MatGetSize(B,&nr,&nc);

  /* apply the telescope permutation: Bperm = P^T B P */
  P = ctx->permutation;
  MatPtAP(B,P,MAT_INITIAL_MATRIX,1.1,&Bperm);

  /* Get submatrices */
  isrow = sred->isin;
  ISCreateStride(comm,nc,0,1,&iscol);

  MatCreateSubMatrices(Bperm,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&_Blocal);
  Blocal = *_Blocal;
  Bred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    PetscInt mm;

    if (reuse != MAT_INITIAL_MATRIX) {Bred = *A;}
    MatGetSize(Blocal,&mm,NULL);
    /* MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,PETSC_DECIDE,reuse,&Bred); */
    MatCreateMPIMatConcatenateSeqMat(subcomm,Blocal,mm,reuse,&Bred);
  }
  *A = Bred;

  ISDestroy(&iscol);
  MatDestroy(&Bperm);
  MatDestroyMatrices(1,&_Blocal);
  return 0;
}

/*
 Create (or reuse) the operator for the sub-KSP. When the user supplied a
 KSPComputeOperators callback (and it is not ignored), only an empty matrix
 with the repartitioned DM's layout is created - assembly is deferred to the
 sub-KSP's own KSPSetUp(). Otherwise the redundant operator is assembled
 explicitly via PCTelescopeMatCreate_dmda_dmactivefalse().
*/
PetscErrorCode PCTelescopeMatCreate_dmda(PC pc,PC_Telescope sred,MatReuse reuse,Mat *A)
{
  DM             dm;
  PetscErrorCode (*dmksp_func)(KSP,Mat,Mat,void*);
  void           *dmksp_ctx;

  PCGetDM(pc,&dm);
  DMKSPGetComputeOperators(dm,&dmksp_func,&dmksp_ctx);
  /* We assume that dmksp_func = NULL, is equivalent to dmActive = PETSC_FALSE */
  if (dmksp_func && !sred->ignore_kspcomputeoperators) {
    DM  dmrepart;
    Mat Ak;

    *A = NULL;
    if (PCTelescope_isActiveRank(sred)) {
      KSPGetDM(sred->ksp,&dmrepart);
      if (reuse == MAT_INITIAL_MATRIX) {
        DMCreateMatrix(dmrepart,&Ak);
        *A = Ak;
      } else if (reuse == MAT_REUSE_MATRIX) {
        Ak = *A;
      }
      /*
       There is no need to explicitly assemble the operator now,
       the sub-KSP will call the method provided to KSPSetComputeOperators() during KSPSetUp()
      */
    }
  } else {
    PCTelescopeMatCreate_dmda_dmactivefalse(pc,sred,reuse,A);
  }
  return 0;
}

/*
 Map a (near) nullspace of the parent operator onto the redundant operator:
 each basis vector is permuted into the repartitioned ordering, scattered
 into this rank's portion, copied into a duplicate of xred, and the results
 are wrapped in a new MatNullSpace on the sub-communicator (active ranks
 only).
*/
PetscErrorCode PCTelescopeSubNullSpaceCreate_dmda_Telescope(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscBool            has_const;
  PetscInt             i,k,n = 0;
  const Vec            *vecs;
  Vec                  *sub_vecs = NULL;
  MPI_Comm             subcomm;
  PC_Telescope_DMDACtx *ctx;

  ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
  subcomm = PetscSubcommChild(sred->psubcomm);
  MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

  if (PCTelescope_isActiveRank(sred)) {
    /* create new vectors */
    if (n) {
      VecDuplicateVecs(sred->xred,n,&sub_vecs);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    const PetscScalar *x_array;
    PetscScalar       *LA_sub_vec;
    PetscInt          st,ed;

    /* permute vector into ordering associated with re-partitioned dmda */
    MatMultTranspose(ctx->permutation,vecs[k],ctx->xp);

    /* pull in vector x->xtmp */
    VecScatterBegin(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);
    VecScatterEnd(sred->scatter,ctx->xp,sred->xtmp,INSERT_VALUES,SCATTER_FORWARD);

    /* copy vector entries into xred */
    VecGetArrayRead(sred->xtmp,&x_array);
    if (sub_vecs) {
      if (sub_vecs[k]) {
        VecGetOwnershipRange(sub_vecs[k],&st,&ed);
        VecGetArray(sub_vecs[k],&LA_sub_vec);
        for (i=0; i<ed-st; i++) {
          LA_sub_vec[i] = x_array[i];
        }
        VecRestoreArray(sub_vecs[k],&LA_sub_vec);
      }
    }
    VecRestoreArrayRead(sred->xtmp,&x_array);
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);
    VecDestroyVecs(n,&sub_vecs);
  }
  return 0;
}

/*
 Propagate the parent operator's nullspace and near-nullspace (when present)
 onto the redundant sub_mat via PCTelescopeSubNullSpaceCreate_dmda_Telescope().
 Only active ranks attach the resulting spaces to sub_mat.
*/
PetscErrorCode PCTelescopeMatNullSpaceCreate_dmda(PC pc,PC_Telescope sred,Mat sub_mat)
{
  Mat            B;

  PCGetOperators(pc,NULL,&B);
  {
    MatNullSpace nullspace,sub_nullspace;
    MatGetNullSpace(B,&nullspace);
    if (nullspace) {
      PetscInfo(pc,"PCTelescope: generating nullspace (DMDA)\n");
      PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc,sred,nullspace,&sub_nullspace);
      if (PCTelescope_isActiveRank(sred)) {
        MatSetNullSpace(sub_mat,sub_nullspace);
        MatNullSpaceDestroy(&sub_nullspace);
      }
    }
  }
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    MatGetNearNullSpace(B,&nearnullspace);
    if (nearnullspace) {
      PetscInfo(pc,"PCTelescope: generating near nullspace (DMDA)\n");
      PCTelescopeSubNullSpaceCreate_dmda_Telescope(pc,sred,nearnullspace,&sub_nearnullspace);
      if (PCTelescope_isActiveRank(sred)) {
        MatSetNearNullSpace(sub_mat,sub_nearnullspace);
        MatNullSpaceDestroy(&sub_nearnullspace);
      }
    }
  }
  return 0;
}

/*
 Apply the telescope preconditioner (DMDA flavor): permute x into the
 repartitioned ordering, scatter it to the sub-communicator, solve with the
 sub-KSP on active ranks, then scatter the solution back and apply the
 inverse permutation into y.
*/
PetscErrorCode PCApply_Telescope_dmda(PC pc,Vec x,Vec y)
{
  PC_Telescope         sred = (PC_Telescope)pc->data;
  Mat                  perm;
  Vec                  xtmp,xp,xred,yred;
  PetscInt             i,st,ed;
  VecScatter           scatter;
  PetscScalar          *array;
  const PetscScalar    *x_array;
  PC_Telescope_DMDACtx *ctx;

  ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
  xtmp    = sred->xtmp;
  scatter = sred->scatter;
  xred    = sred->xred;
  yred    = sred->yred;
  perm    = ctx->permutation;
  xp      = ctx->xp;

  /* register the PCTelescope citation on first application */
  PetscCitationsRegister(citation,&cited);

  /* permute vector into ordering associated with re-partitioned dmda */
  MatMultTranspose(perm,x,xp);

  /* pull in vector x->xtmp */
  VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

  /* copy vector entries into xred (xred is NULL on inactive ranks) */
  VecGetArrayRead(xtmp,&x_array);
  if (xred) {
    PetscScalar *LA_xred;
    VecGetOwnershipRange(xred,&st,&ed);

    VecGetArray(xred,&LA_xred);
    for (i=0; i<ed-st; i++) {
      LA_xred[i] = x_array[i];
    }
    VecRestoreArray(xred,&LA_xred);
  }
  VecRestoreArrayRead(xtmp,&x_array);

  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSolve(sred->ksp,xred,yred);
    KSPCheckSolve(sred->ksp,pc,yred);
  }

  /* return vector: copy the sub-solve result back into xtmp */
  VecGetArray(xtmp,&array);
  if (yred) {
    const PetscScalar *LA_yred;
    VecGetOwnershipRange(yred,&st,&ed);
    VecGetArrayRead(yred,&LA_yred);
    for (i=0; i<ed-st; i++) {
      array[i] = LA_yred[i];
    }
    VecRestoreArrayRead(yred,&LA_yred);
  }
  VecRestoreArray(xtmp,&array);
  /* reverse scatter and undo the permutation to obtain y */
  VecScatterBegin(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  VecScatterEnd(scatter,xtmp,xp,INSERT_VALUES,SCATTER_REVERSE);
  MatMult(perm,xp,y);
  return 0;
}

946: PetscErrorCode PCApplyRichardson_Telescope_dmda(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol, PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
947: {
948:   PC_Telescope         sred = (PC_Telescope)pc->data;
949:   Mat                  perm;
950:   Vec                  xtmp,xp,yred;
951:   PetscInt             i,st,ed;
952:   VecScatter           scatter;
953:   const PetscScalar    *x_array;
954:   PetscBool            default_init_guess_value = PETSC_FALSE;
955:   PC_Telescope_DMDACtx *ctx;

957:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
958:   xtmp    = sred->xtmp;
959:   scatter = sred->scatter;
960:   yred    = sred->yred;
961:   perm    = ctx->permutation;
962:   xp      = ctx->xp;

965:   *reason = (PCRichardsonConvergedReason)0;

967:   if (!zeroguess) {
968:     PetscInfo(pc,"PCTelescopeDMDA: Scattering y for non-zero-initial guess\n");
969:     /* permute vector into ordering associated with re-partitioned dmda */
970:     MatMultTranspose(perm,y,xp);

972:     /* pull in vector x->xtmp */
973:     VecScatterBegin(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);
974:     VecScatterEnd(scatter,xp,xtmp,INSERT_VALUES,SCATTER_FORWARD);

976:     /* copy vector entries into xred */
977:     VecGetArrayRead(xtmp,&x_array);
978:     if (yred) {
979:       PetscScalar *LA_yred;
980:       VecGetOwnershipRange(yred,&st,&ed);
981:       VecGetArray(yred,&LA_yred);
982:       for (i=0; i<ed-st; i++) {
983:         LA_yred[i] = x_array[i];
984:       }
985:       VecRestoreArray(yred,&LA_yred);
986:     }
987:     VecRestoreArrayRead(xtmp,&x_array);
988:   }

990:   if (PCTelescope_isActiveRank(sred)) {
991:     KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
992:     if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
993:   }

995:   PCApply_Telescope_dmda(pc,x,y);

997:   if (PCTelescope_isActiveRank(sred)) {
998:     KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
999:   }

1001:   if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
1002:   *outits = 1;
1003:   return 0;
1004: }

1006: PetscErrorCode PCReset_Telescope_dmda(PC pc)
1007: {
1008:   PC_Telescope         sred = (PC_Telescope)pc->data;
1009:   PC_Telescope_DMDACtx *ctx;

1011:   ctx = (PC_Telescope_DMDACtx*)sred->dm_ctx;
1012:   VecDestroy(&ctx->xp);
1013:   MatDestroy(&ctx->permutation);
1014:   DMDestroy(&ctx->dmrepart);
1015:   PetscFree3(ctx->range_i_re,ctx->range_j_re,ctx->range_k_re);
1016:   PetscFree3(ctx->start_i_re,ctx->start_j_re,ctx->start_k_re);
1017:   return 0;
1018: }

1020: PetscErrorCode DMView_DA_Short_3d(DM dm,PetscViewer v)
1021: {
1022:   PetscInt       M,N,P,m,n,p,ndof,nsw;
1023:   MPI_Comm       comm;
1024:   PetscMPIInt    size;
1025:   const char*    prefix;

1027:   PetscObjectGetComm((PetscObject)dm,&comm);
1028:   MPI_Comm_size(comm,&size);
1029:   DMGetOptionsPrefix(dm,&prefix);
1030:   DMDAGetInfo(dm,NULL,&M,&N,&P,&m,&n,&p,&ndof,&nsw,NULL,NULL,NULL,NULL);
1031:   if (prefix) PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);
1032:   else PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);
1033:   PetscViewerASCIIPrintf(v,"  M %D N %D P %D m %D n %D p %D dof %D overlap %D\n",M,N,P,m,n,p,ndof,nsw);
1034:   return 0;
1035: }

1037: PetscErrorCode DMView_DA_Short_2d(DM dm,PetscViewer v)
1038: {
1039:   PetscInt       M,N,m,n,ndof,nsw;
1040:   MPI_Comm       comm;
1041:   PetscMPIInt    size;
1042:   const char*    prefix;

1044:   PetscObjectGetComm((PetscObject)dm,&comm);
1045:   MPI_Comm_size(comm,&size);
1046:   DMGetOptionsPrefix(dm,&prefix);
1047:   DMDAGetInfo(dm,NULL,&M,&N,NULL,&m,&n,NULL,&ndof,&nsw,NULL,NULL,NULL,NULL);
1048:   if (prefix) PetscViewerASCIIPrintf(v,"DMDA Object:    (%s)    %d MPI processes\n",prefix,size);
1049:   else PetscViewerASCIIPrintf(v,"DMDA Object:    %d MPI processes\n",size);
1050:   PetscViewerASCIIPrintf(v,"  M %D N %D m %D n %D dof %D overlap %D\n",M,N,m,n,ndof,nsw);
1051:   return 0;
1052: }

1054: PetscErrorCode DMView_DA_Short(DM dm,PetscViewer v)
1055: {
1056:   PetscInt       dim;

1058:   DMDAGetInfo(dm,&dim,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
1059:   switch (dim) {
1060:   case 2: DMView_DA_Short_2d(dm,v);
1061:     break;
1062:   case 3: DMView_DA_Short_3d(dm,v);
1063:     break;
1064:   }
1065:   return 0;
1066: }