Actual source code: ex4.c
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";

#include <petscdm.h>
#include <petscdmda.h>
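
/*
   Summary of the code below: create a 2d DMDA whose size, boundary type, and
   stencil are chosen from command-line options; create global and local vectors;
   move data between them with global-to-local and local-to-global scatters;
   optionally print the local vectors; and optionally check the AO mappings
   between the PETSc (DMDA) ordering and the application (natural) ordering.
*/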
int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 10,N = 8,m = PETSC_DECIDE;
  PetscInt         s = 2,w = 2,n = PETSC_DECIDE,nloc,l,i,j,kk;
  PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal;
  const PetscInt   *ltog;
  PetscInt         *lx = NULL,*ly = NULL;
  PetscBool        testorder = PETSC_FALSE,flg;
  DMBoundaryType   bx = DM_BOUNDARY_NONE,by = DM_BOUNDARY_NONE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMDAStencilType  st = DMDA_STENCIL_BOX;
  AO               ao;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);

  /* Read options */
  PetscOptionsGetInt(NULL,NULL,"-NX",&M,NULL);
  PetscOptionsGetInt(NULL,NULL,"-NY",&N,NULL);
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetInt(NULL,NULL,"-s",&s,NULL);
  PetscOptionsGetInt(NULL,NULL,"-w",&w,NULL);
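
  /* M,N: global grid dimensions; m,n: process grid (PETSC_DECIDE lets PETSc choose);
     s: stencil width; w: degrees of freedom per grid point */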

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xperiodic",&flg,NULL); if (flg) bx = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-yperiodic",&flg,NULL); if (flg) by = DM_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-xghosted",&flg,NULL); if (flg) bx = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-yghosted",&flg,NULL); if (flg) by = DM_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-star",&flg,NULL); if (flg) st = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-box",&flg,NULL); if (flg) st = DMDA_STENCIL_BOX;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-testorder",&testorder,NULL);

  /*
     Optionally test a user-specified distribution: put four nodes in x and two
     nodes in y on each process, except that the last process in each direction
     gets the remainder (this requires -m and -n to be set on the command line).
  */
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-distribute",&flg,NULL);
  if (flg) {
    PetscMalloc1(m,&lx);
    for (i=0; i<m-1; i++) { lx[i] = 4; }
    lx[m-1] = M - 4*(m-1);

    PetscMalloc1(n,&ly);
    for (i=0; i<n-1; i++) { ly[i] = 2; }
    ly[n-1] = N - 2*(n-1);
  }

  /* Create distributed array and get vectors */
  DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
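  /* DMSetFromOptions() lets standard DMDA options (e.g. -da_grid_x) override the
     arguments given above; DMSetUp() then builds the parallel data structures */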
  PetscFree(lx);
  PetscFree(ly);

  DMView(da,viewer);
  DMCreateGlobalVector(da,&global);
  DMCreateLocalVector(da,&local);
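  /* The global vector holds only locally owned entries; the local vector also
     holds the ghost points of width s surrounding this process's patch */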

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global,value);
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
  DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
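  /* After this insert-mode local-to-global scatter, each entry of the global
     vector holds the rank of the process that owns it */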

  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");
    VecView(global,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(PETSC_COMM_WORLD,"\n\n");
  }

  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-local_print",&flg,NULL);
  if (flg) {
    PetscViewer sviewer;
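
    /* A sub-viewer restricted to PETSC_COMM_SELF lets each process print its
       sequential local vector in turn without interleaving the output */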
    PetscViewerASCIIPushSynchronized(PETSC_VIEWER_STDOUT_WORLD);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSubViewer(PETSC_VIEWER_STDOUT_WORLD,PETSC_COMM_SELF,&sviewer);
    PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPopSynchronized(PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Tests mappings between application/PETSc orderings */
  if (testorder) {
    ISLocalToGlobalMapping ltogm;

    DMGetLocalToGlobalMapping(da,&ltogm);
    ISLocalToGlobalMappingGetSize(ltogm,&nloc);
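    /* nloc is the number of local entries in the mapping: owned plus ghost nodes, times w */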
    ISLocalToGlobalMappingGetIndices(ltogm,&ltog);
    DMDAGetGhostCorners(da,&Xs,&Ys,NULL,&Xm,&Ym,NULL);
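    /* (Xs,Ys) and (Xm,Ym) are the start and width of this process's ghosted patch,
       measured in grid nodes (not in degrees of freedom) */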
    DMDAGetAO(da,&ao);
    PetscMalloc1(nloc,&iglobal);

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          iglobal[kk++] = ltog[iloc+l];
        }
      }
    }

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);
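    /* After this round trip, iglobal should again contain the original DMDA indices */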

    /* Verify the mappings */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          if (iglobal[kk] != ltog[iloc+l]) {
            PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",rank,j,i,l,ltog[iloc+l],iglobal[kk]);
          }
          kk++;
        }
      }
    }
    PetscFree(iglobal);
    ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);
  }

  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);

  PetscFinalize();
  return 0;
}
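
/*
   Possible manual invocations (assuming the executable is built as ex4; the -nox
   option suppresses the X-window viewer opened by PetscViewerDrawOpen()):
     mpiexec -n 4 ./ex4 -nox
     mpiexec -n 4 ./ex4 -testorder -nox
     mpiexec -n 4 ./ex4 -m 2 -n 2 -distribute -local_print -nox
*/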

/*TEST

   test:
      nsize: 4
      args: -nox
      filter: grep -v -i Object
      requires: x

   test:
      suffix: 2
      args: -testorder -nox
      requires: x

TEST*/