!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------
      program main
#include <petsc/finclude/petscksp.h>
      use petscksp
      implicit none
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of error in solution
!     rctx    - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers; i.e., vectors in PETSc are not simply
!  double precision x(*).  However, local vector data can be easily
!  accessed via VecGetArray().  See the Fortran section of the PETSc
!  users manual for details.
!
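!  For illustration only -- a hedged sketch, not executed in this example:
!  given a vector x and an extra declaration such as
!     PetscScalar, pointer :: xx(:)
!  the locally owned entries could be accessed with the Fortran array
!  access routines:
!
!     call VecGetArrayF90(x,xx,ierr)
!     ... read or modify xx(:) here ...
!     call VecRestoreArrayF90(x,xx,ierr)
!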
      PetscReal      norm
      PetscInt       i,j,II,JJ,m,n,its
      PetscInt       Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt    rank,size
      PetscBool      flg
      PetscScalar    v,one,neg_one
      Vec            x,b,u
      Mat            A
      KSP            ksp
      PetscRandom    rctx
      PetscViewerAndFormat vf,vzero
!  These variables are not currently used.
!     PC          pc
!     PCType      ptype
!     PetscReal   tol
!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      m = 3
      n = 3
      one = 1.0
      neg_one = -1.0
      ione = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.
      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)
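!  Optional performance sketch (commented out; not part of this example):
!  instead of relying on MatSetUp(), one could preallocate storage for
!  the AIJ formats.  The five-point stencil has at most 5 nonzeros per
!  row; the even diagonal/off-diagonal split below is a rough assumption,
!  and "ifive" would be a PetscInt set to 5.
!
!     call MatMPIAIJSetPreallocation(A,ifive,PETSC_NULL_INTEGER,ifive,PETSC_NULL_INTEGER,ierr)
!     call MatSeqAIJSetPreallocation(A,ifive,PETSC_NULL_INTEGER,ierr)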
!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!  Note: this example uses the less common natural ordering that orders
!  first all the unknowns for x = h, then for x = 2h, etc.; hence you see
!  JJ = II +- n instead of JJ = II +- m as you might expect.  The more
!  standard ordering would first do all variables for y = h, then y = 2h, etc.
      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
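!  For small problems one could view the assembled matrix to verify the
!  stencil (an optional, commented-out sketch; MatView() on this parallel
!  viewer prints the locally owned rows from each process in order):
!
!     call MatView(A,PETSC_VIEWER_STDOUT_WORLD,ierr)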
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0; alternatively, the runtime option
!  -random_exact_sol forms a solution vector with random components.
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-random_exact_sol',flg,ierr)
      if (flg) then
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call VecSetRandom(u,rctx,ierr)
        call PetscRandomDestroy(rctx,ierr)
      else
        call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)
!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-view_exact_sol',flg,ierr)
      if (flg) then
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions().  All of these defaults can be
!     overridden at runtime, as indicated below.

!  We comment out this section of code since the Jacobi
!  preconditioner is not a good general default.
!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_monitor',flg,ierr)
      if (flg) then
        vzero = 0
        call KSPMonitorSet(ksp,MyKSPMonitor,vzero,PETSC_NULL_FUNCTION,ierr)
!
!  Also use the default KSP monitor routine, showing how it may be used from Fortran
!
        call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_DEFAULT,vf,ierr)
        call KSPMonitorSet(ksp,KSPMonitorDefault,vf,PetscViewerAndFormatDestroy,ierr)
      endif
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
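!  For example, one might run (an illustrative command line; the
!  executable name ex2f is an assumption):
!
!     mpiexec -n 2 ./ex2f -ksp_type cg -pc_type jacobi -ksp_rtol 1.e-8 -ksp_monitor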
!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,0,PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
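!  Optionally one could also query why the iteration stopped (a hedged,
!  commented-out sketch; it assumes an extra declaration
!  "KSPConvergedReason reason" -- negative values indicate divergence):
!
!     call KSPGetConvergedReason(ksp,reason,ierr)
!     if (reason .lt. 0) print*,'KSP did not converge'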
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error
      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',e11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12 iterations ',i5)
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_view).  See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end
! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
      use petscksp
      implicit none

      KSP            ksp
      Vec            x
      PetscErrorCode ierr
      PetscInt       n,dummy
      PetscMPIInt    rank
      PetscReal      rnorm
!  Build the solution vector
      call KSPBuildSolution(ksp,PETSC_NULL_VEC,x,ierr)
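!  (Note: the vector returned by KSPBuildSolution() is owned by the KSP
!  and must not be destroyed here.)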
!  Write the solution vector and residual norm to stdout.
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e11.4)

      ierr = 0
      end
! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
      use petscksp
      implicit none

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      PetscReal          rnorm
      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif

      ierr = 0
      end
!/*TEST
!
!   test:
!      nsize: 2
!      args: -pc_type jacobi -ksp_monitor_short -ksp_gmres_cgs_refinement_type refine_always
!
!   test:
!      suffix: 2
!      nsize: 2
!      args: -pc_type jacobi -my_ksp_monitor -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/