Actual source code: ex15f.F90
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -user_defined_pc : Activate a user-defined preconditioner
!
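!   Example run, matching the test at the bottom of this file (the
!   executable name is assumed from the source file name):
!      mpiexec -n 2 ./ex15f -user_defined_pc -ksp_monitor
!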
! -------------------------------------------------------------------------
!
!  Module contains the diagonal work vector (diag) needed by the shell
!  preconditioner
!
      module mymoduleex15f
#include <petsc/finclude/petscksp.h>
      use petscksp
      Vec diag
      end module

      program main
      use mymoduleex15f
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its,five
      PetscMPIInt      rank
      PetscBool        user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply
      external SampleShellPCDestroy

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      five    = 5
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A,MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)
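
!  (Preallocating five nonzeros per row matches the five-point stencil
!   assembled below.)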

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue
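
!  Each interior grid point thus receives the standard 2-D Laplacian
!  stencil: 4 on the diagonal and -1 for each of its four neighbors.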

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(),  MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
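
!  (The same relative tolerance could instead be set at runtime with
!   the option -ksp_rtol 1.e-7.)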

!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-user_defined_pc',user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
        call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
        call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
        call PCShellSetSetUp(pc,SampleShellPCSetUp,ierr)

!  (Optional) Free any objects we created for the preconditioner
        call PCShellSetDestroy(pc,SampleShellPCDestroy,ierr)

      else
        call PCSetType(pc,PCJACOBI,ierr)
      endif
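
!  (PCJACOBI is PETSc's built-in equivalent of the shell preconditioner
!   defined below, so both branches apply the same Jacobi preconditioner.)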

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the Jacobi
!   method.  Thus, here we create a work vector for storing the reciprocal
!   of the diagonal of the preconditioner matrix; this vector is then
!   used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,ierr)
      use mymoduleex15f
      use petscksp
      implicit none

      PC             pc
      Mat            pmat
      PetscErrorCode ierr

      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call MatCreateVecs(pmat,diag,PETSC_NULL_VEC,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x  - input vector
!
!   Output Parameters:
!   y    - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use mymoduleex15f
      implicit none

      PC             pc
      Vec            x,y
      PetscErrorCode ierr
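
!  Apply Jacobi preconditioning: y(i) = x(i)*diag(i), where diag holds
!  the reciprocal of the matrix diagonal set up in SampleShellPCSetUp().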
      call VecPointwiseMult(y,x,diag,ierr)

      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCDestroy - This routine destroys (frees the memory of) any
!   objects we made for the preconditioner
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!

      subroutine SampleShellPCDestroy(pc,ierr)
      use mymoduleex15f
      implicit none

      PC             pc
      PetscErrorCode ierr

!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext()
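!
!  A minimal sketch of that approach (hypothetical; ctx would be a
!  user-defined derived type holding the work vector):
!     call PCShellSetContext(pc,ctx,ierr)   ! in main, after PCSetType
!     call PCShellGetContext(pc,ctx,ierr)   ! in setup/apply/destroy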

      call VecDestroy(diag,ierr)

      end

!
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/