Actual source code: ex7f.F90


! Block Jacobi preconditioner for solving a linear system in parallel with KSP.
! The code shows how to set the sizes of the individual blocks and how to use
! different linear solvers on the individual blocks.

! This example focuses on ways to customize the block Jacobi preconditioner.
! See ex1.c and ex2.c for more detailed comments on the basic usage of KSP
! (including working with matrices and vectors).

! Recall: The block Jacobi method is equivalent to the ASM preconditioner with zero overlap.
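! (For instance, running this example with -pc_type asm -pc_asm_overlap 0 should
!  give essentially the same preconditioner as the block Jacobi default used here.)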

program main
#include <petsc/finclude/petscksp.h>
      use petscksp

      implicit none
      Vec             :: x,b,u        ! approx solution, RHS, exact solution
      Mat             :: A            ! linear system matrix
      KSP             :: ksp          ! KSP context
      PC              :: myPc         ! PC context
      PC              :: subpc        ! PC context for subdomain
      PetscReal       :: norm         ! norm of solution error
      PetscReal,parameter :: tol = 1.e-6
      PetscErrorCode  :: ierr
      PetscInt        :: i,j,Ii,JJ,n
      PetscInt        :: m
      PetscMPIInt     :: myRank,mySize
      PetscInt        :: its,nlocal,first,Istart,Iend
      PetscScalar     :: v
      PetscScalar, parameter :: &
        myNone = -1.0, &
        sone   = 1.0
      PetscBool       :: isbjacobi,flg
      KSP,allocatable,dimension(:)      :: subksp     ! array of local KSP contexts on this processor
      PetscInt,allocatable,dimension(:) :: blks
      character(len=PETSC_MAX_PATH_LEN) :: outputString
      PetscInt,parameter :: one = 1, five = 5

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr /= 0) then
        write(6,*) 'Unable to initialize PETSc'
        stop
      endif

      m = 4
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      CHKERRA(ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,myRank,ierr)
      CHKERRA(ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,mySize,ierr)
      CHKERRA(ierr)
      n = m+2

      !-------------------------------------------------------------------
      ! Compute the matrix and right-hand-side vector that define
      ! the linear system, Ax = b.
      !-------------------------------------------------------------------

      ! Create and assemble parallel matrix

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
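      ! Each global row Ii corresponds to the grid point (i,j) = (Ii/n, Ii-i*n);
      ! the loop below inserts the standard 5-point stencil (-1 for each of the
      ! four neighbours, 4 on the diagonal), i.e. a 2-D Laplacian-type operator.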

      do Ii=Istart,Iend-1
        v = -1.0; i = Ii/n; j = Ii - i*n

        if (i>0) then
          JJ = Ii - n
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
        endif

        if (i<m-1) then
          JJ = Ii + n
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
        endif

        if (j>0) then
          JJ = Ii - 1
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
        endif

        if (j<n-1) then
          JJ = Ii + 1
          call MatSetValues(A,one,Ii,one,JJ,v,ADD_VALUES,ierr);CHKERRA(ierr)
        endif

        v = 4.0
        call MatSetValues(A,one,Ii,one,Ii,v,ADD_VALUES,ierr);CHKERRA(ierr)

      enddo

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr);CHKERRA(ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr);CHKERRA(ierr)
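      ! (Assembly is split into Begin/End so that, in principle, other work
      !  could be overlapped with the communication between the two calls.)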

      ! Create parallel vectors

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      CHKERRA(ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      CHKERRA(ierr)
      call VecSetFromOptions(u,ierr)
      CHKERRA(ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

      ! Set exact solution; then compute right-hand-side vector.

      call VecSet(u,sone,ierr)
      CHKERRA(ierr)
      call MatMult(A,u,b,ierr)
      CHKERRA(ierr)

      ! Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      CHKERRA(ierr)

      ! Set operators. Here the matrix that defines the linear system
      ! also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
      CHKERRA(ierr)
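      ! (A different matrix could be passed as the third argument if the
      !  preconditioner should be built from something other than A.)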

      ! Set default preconditioner for this program to be block Jacobi.
      ! This choice can be overridden at runtime with the option
      ! -pc_type <type>
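      ! For example (assuming the executable is named ex7f),
      !   mpiexec -n 2 ./ex7f -pc_type asm
      ! would replace the block Jacobi preconditioner set below with ASM.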

      call KSPGetPC(ksp,myPc,ierr)
      CHKERRA(ierr)
      call PCSetType(myPc,PCBJACOBI,ierr)
      CHKERRA(ierr)

      !-------------------------------------------------------------------
      !            Define the problem decomposition
      !-------------------------------------------------------------------

      ! Call PCBJacobiSetTotalBlocks() to set the size of each block in the
      ! preconditioner individually.  This could also be done with the runtime
      ! option -pc_bjacobi_blocks <blocks>.
      ! Also, see PCBJacobiSetLocalBlocks() to set the blocks local to
      ! each processor.

      ! Note: The default decomposition is 1 block per processor.
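      ! Below, blks(1:m) = n requests m blocks of n rows each (m*n rows in
      ! total); with the default m = 4 (so n = 6) this is comparable to running
      ! with -pc_bjacobi_blocks 4.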

      allocate(blks(m),source = n)

      call PCBJacobiSetTotalBlocks(myPc,m,blks,ierr)
      CHKERRA(ierr)
      deallocate(blks)

      !-------------------------------------------------------------------
      !       Set the linear solvers for the subblocks
      !-------------------------------------------------------------------

      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! Basic method, should be sufficient for the needs of most users.
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! By default, the block Jacobi method uses the same solver on each
      ! block of the problem.  To set the same solver options on all blocks,
      ! use the prefix -sub before the usual PC and KSP options, e.g.,
      ! -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
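      ! For instance (again assuming the executable name ex7f),
      !   mpiexec -n 2 ./ex7f -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
      ! would use ILU-preconditioned GMRES with rtol 1.e-4 on every block.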

      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      ! Advanced method, setting different solvers for various blocks.
      ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      ! Note that each block's KSP context is completely independent of
      ! the others, and the full range of uniprocessor KSP options is
      ! available for each block. The following section of code is intended
      ! to be a simple illustration of setting different linear solvers for
      ! the individual blocks.  These choices are obviously not recommended
      ! for solving this particular problem.

      call PetscObjectTypeCompare(myPc,PCBJACOBI,isbjacobi,ierr)

      if (isbjacobi) then

        ! Call KSPSetUp() to set the block Jacobi data structures (including
        ! creation of an internal KSP context for each block).
        ! Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().

        call KSPSetUp(ksp,ierr)

        ! Extract the array of KSP contexts for the local blocks
        call PCBJacobiGetSubKSP(myPc,nlocal,first,PETSC_NULL_KSP,ierr)
        allocate(subksp(nlocal))
        call PCBJacobiGetSubKSP(myPc,nlocal,first,subksp,ierr)
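        ! (The first call, with PETSC_NULL_KSP, only queried nlocal and first so
        !  that subksp could be allocated; the second call fills the array with
        !  the local block solvers.)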

        ! Loop over the local blocks, setting various KSP options for each block

        do i=0,nlocal-1

          call KSPGetPC(subksp(i+1),subpc,ierr)
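          ! Purely for illustration, choose different solvers depending on the
          ! rank and the local block number:
          !   rank 0           : Jacobi PC + GMRES on every local block
          !   rank > 0, i odd  : ILU PC (with the default KSP)
          !   rank > 0, i even : no PC + BiCGStab, with a tighter tolerance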

          if (myRank>0) then

            if (mod(i,2)==1) then
              call PCSetType(subpc,PCILU,ierr); CHKERRA(ierr)
            else
              call PCSetType(subpc,PCNONE,ierr); CHKERRA(ierr)
              call KSPSetType(subksp(i+1),KSPBCGS,ierr); CHKERRA(ierr)
              call KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
              CHKERRA(ierr)
            endif

          else
            call PCSetType(subpc,PCJACOBI,ierr); CHKERRA(ierr)
            call KSPSetType(subksp(i+1),KSPGMRES,ierr); CHKERRA(ierr)
            call KSPSetTolerances(subksp(i+1),tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
            CHKERRA(ierr)
          endif

        end do

      endif

      !-------------------------------------------------------------------
      !                Solve the linear system
      !-------------------------------------------------------------------

      ! Set runtime options

      call KSPSetFromOptions(ksp,ierr); CHKERRA(ierr)

      ! Solve the linear system

      call KSPSolve(ksp,b,x,ierr); CHKERRA(ierr)

      !-------------------------------------------------------------------
      !               Check solution and clean up
      !-------------------------------------------------------------------

      !-------------------------------------------------------------------
      ! Check the error
      !-------------------------------------------------------------------

      !call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
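      ! Compute the error: VecAXPY with myNone = -1.0 overwrites x with x - u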

      call VecAXPY(x,myNone,u,ierr)

      !call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)

      call VecNorm(x,NORM_2,norm,ierr); CHKERRA(ierr)
      call KSPGetIterationNumber(ksp,its,ierr); CHKERRA(ierr)
      write(outputString,*) 'Norm of error',real(norm),'Iterations',its,'\n'    ! PetscScalar might be of complex type
      call PetscPrintf(PETSC_COMM_WORLD,outputString,ierr); CHKERRA(ierr)

      ! Free work space.  All PETSc objects should be destroyed when they
      ! are no longer needed.
      if (allocated(subksp)) deallocate(subksp)
      call KSPDestroy(ksp,ierr); CHKERRA(ierr)
      call VecDestroy(u,ierr); CHKERRA(ierr)
      call VecDestroy(b,ierr); CHKERRA(ierr)
      call MatDestroy(A,ierr); CHKERRA(ierr)
      call VecDestroy(x,ierr); CHKERRA(ierr)
      call PetscFinalize(ierr); CHKERRA(ierr)

end program main

!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -ksp_view ::ascii_info_detail
!
!TEST*/