Actual source code: ex21f.F
1: !
2: ! "$Id: ex21f.F,v 1.9 2001/08/07 03:04:00 balay Exp $";
3: !
4: ! Solves a linear system in parallel with KSP. Also indicates
 5: !  use of a user-provided preconditioner.  Input parameters include:
 6: !     -m <rows> -n <cols> : dimensions of the 2-D grid (defaults 8 and 7)
!
7: ! Program usage: mpirun ex21f [-help] [all PETSc options]
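!  For example (an illustrative run; process count and grid size are not
!  required values):
!     mpirun -np 2 ex21f -m 16 -n 16 -ksp_monitor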
8: !
9: !/*T
10: ! Concepts: KSP^basic parallel example
11: ! Concepts: PC^setting a user-defined shell preconditioner
12: ! Processors: n
13: !T*/
14: !
15: ! -------------------------------------------------------------------------
17: program main
18: implicit none
20: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
21: ! Include files
22: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
23: !
24: !     petsc.h     - base PETSc routines        petscvec.h - vectors
25: !     petscsys.h  - system routines            petscmat.h - matrices
26: !     petscksp.h  - Krylov subspace methods    petscpc.h  - preconditioners
28: #include "include/finclude/petsc.h"
29: #include "include/finclude/petscvec.h"
30: #include "include/finclude/petscmat.h"
31: #include "include/finclude/petscpc.h"
32: #include "include/finclude/petscksp.h"
34: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
35: ! Variable declarations
36: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
37: !
38: ! Variables:
39: !     ksp      - linear solver (Krylov subspace method) context
41: ! pc - preconditioner context
42: ! x, b, u - approx solution, right-hand-side, exact solution vectors
43: ! A - matrix that defines linear system
44: ! its - iterations for convergence
45: ! norm - norm of solution error
47: Vec x,b,u
48: Mat A
49: PC pc
50: KSP ksp
51: PetscScalar v,one,neg_one
52: double precision norm,tol
53: integer i,j,II,JJ,Istart,Iend,ierr,m,n
54: integer its,flg,rank
56: ! Note: Any user-defined Fortran routines MUST be declared as external.
58: external SampleShellPCSetUp,SampleShellPCApply
60: ! Common block to store data for user-provided preconditioner
61: common /mypcs/ jacobi,sor,work
62: PC jacobi,sor
63: Vec work
65: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
66: ! Beginning of program
67: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
69: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
70: one = 1.0
71: neg_one = -1.0
72: m = 8
73: n = 7
74: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
75: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
76: call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
78: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
79: ! Compute the matrix and right-hand-side vector that define
80: ! the linear system, Ax = b.
81: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
83: ! Create parallel matrix, specifying only its global dimensions.
84: ! When using MatCreate(), the matrix format can be specified at
85: ! runtime. Also, the parallel partitioning of the matrix is
86: ! determined by PETSc at runtime.
88: call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n, &
89: & m*n,A,ierr)
91: call MatSetFromOptions(A,ierr)
93: ! Currently, all PETSc parallel matrix formats are partitioned by
94: ! contiguous chunks of rows across the processors. Determine which
95: ! rows of the matrix are locally owned.
97: call MatGetOwnershipRange(A,Istart,Iend,ierr)
99: ! Set matrix elements for the 2-D, five-point stencil in parallel.
100: ! - Each processor needs to insert only elements that it owns
101: ! locally (but any non-local elements will be sent to the
102: ! appropriate processor during matrix assembly).
103: !  - Always specify global rows and columns of matrix entries.
104: ! - Note that MatSetValues() uses 0-based row and column numbers
105: ! in Fortran as well as in C.
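!  (Row II of the matrix corresponds to grid point (i,j), where i = II/n
!   and j = II - i*n; the loop inserts the usual 5-point stencil values:
!   4 on the diagonal and -1 for each neighbour that exists in the
!   m x n grid.)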
107: do 10, II=Istart,Iend-1
108: v = -1.0
109: i = II/n
110: j = II - i*n
111: if (i.gt.0) then
112: JJ = II - n
113: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
114: endif
115: if (i.lt.m-1) then
116: JJ = II + n
117: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
118: endif
119: if (j.gt.0) then
120: JJ = II - 1
121: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
122: endif
123: if (j.lt.n-1) then
124: JJ = II + 1
125: call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
126: endif
127: v = 4.0
128: call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
129: 10 continue
131: ! Assemble matrix, using the 2-step process:
132: ! MatAssemblyBegin(), MatAssemblyEnd()
133: !  Computations can be done while messages are in transit, by placing
134: !  code between these two statements.
136: call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
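!      (independent local computations could be placed here, while the
!       assembly messages are in transit)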
137: call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
139: ! Create parallel vectors.
140: ! - Here, the parallel partitioning of the vector is determined by
141: ! PETSc at runtime. We could also specify the local dimensions
142: ! if desired -- or use the more general routine VecCreate().
143: ! - When solving a linear system, the vectors and matrices MUST
144: ! be partitioned accordingly. PETSc automatically generates
145: ! appropriately partitioned matrices and vectors when MatCreate()
146: ! and VecCreate() are used with the same communicator.
147: ! - Note: We form 1 vector from scratch and then duplicate as needed.
149: call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
150: call VecDuplicate(u,b,ierr)
151: call VecDuplicate(b,x,ierr)
153: ! Set exact solution; then compute right-hand-side vector.
155: call VecSet(one,u,ierr)
156: call MatMult(A,u,b,ierr)
158: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
159: ! Create the linear solver and set various options
160: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: ! Create linear solver context
164: call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
166: ! Set operators. Here the matrix that defines the linear system
167: ! also serves as the preconditioning matrix.
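!  (The final argument is the MatStructure flag; it only matters when
!  KSPSetOperators() is called repeatedly with a changing matrix, and
!  DIFFERENT_NONZERO_PATTERN is the conservative choice.)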
169: call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
171: ! Set linear solver defaults for this problem (optional).
172: ! - By extracting the KSP and PC contexts from the KSP context,
173: !  we can then directly call any KSP and PC routines
174: ! to set various options.
176: call KSPGetPC(ksp,pc,ierr)
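!  The call below sets only the relative convergence tolerance; the
!  absolute tolerance, divergence tolerance, and maximum iteration count
!  are left at their PETSc defaults.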
177: tol = 1.e-7
178: call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION, &
179: & PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)
181: !
182: ! Set a user-defined shell preconditioner
183: !
185: ! (Required) Indicate to PETSc that we are using a shell preconditioner
186: call PCSetType(pc,PCSHELL,ierr)
188: ! (Required) Set the user-defined routine for applying the preconditioner
189: call PCShellSetApply(pc,SampleShellPCApply,PETSC_NULL_OBJECT, &
190: & ierr)
192: ! (Optional) Do any setup required for the preconditioner
193: call SampleShellPCSetUp(A,x,ierr)
196: ! Set runtime options, e.g.,
197: ! -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
198: ! These options will override those specified above as long as
199: ! KSPSetFromOptions() is called _after_ any other customization
200: ! routines.
202: call KSPSetFromOptions(ksp,ierr)
204: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
205: ! Solve the linear system
206: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208: call KSPSetRhs(ksp,b,ierr)
209: call KSPSetSolution(ksp,x,ierr)
210: call KSPSolve(ksp,ierr)
212: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
213: ! Check solution and clean up
214: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
216: ! Check the error
218: call VecAXPY(neg_one,u,x,ierr)
219: call VecNorm(x,NORM_2,norm,ierr)
220: call KSPGetIterationNumber(ksp,its,ierr)
222: if (rank .eq. 0) then
223: if (norm .gt. 1.e-12) then
224: write(6,100) norm,its
225: else
226: write(6,110) its
227: endif
228: endif
229: 100 format('Norm of error ',1pe10.4,' iterations ',i5)
230: 110 format('Norm of error < 1.e-12, iterations ',i5)
233: ! Free work space. All PETSc objects should be destroyed when they
234: ! are no longer needed.
236: call KSPDestroy(ksp,ierr)
237: call VecDestroy(u,ierr)
238: call VecDestroy(x,ierr)
239: call VecDestroy(b,ierr)
240: call MatDestroy(A,ierr)
242: ! Free up PCShell data
243: call PCDestroy(sor,ierr)
244: call PCDestroy(jacobi,ierr)
245: call VecDestroy(work,ierr)
248: ! Always call PetscFinalize() before exiting a program.
250: call PetscFinalize(ierr)
251: end
253: !/***********************************************************************/
254: !/* Routines for a user-defined shell preconditioner */
255: !/***********************************************************************/
257: !
258: ! SampleShellPCSetUp - This routine sets up a user-defined
259: ! preconditioner context.
260: !
261: ! Input Parameters:
262: ! pmat - preconditioner matrix
263: ! x - vector
264: !
265: ! Output Parameter:
266: ! ierr - error code (nonzero if error has been detected)
267: !
268: ! Notes:
269: !   In this example, we define the shell preconditioner to be the sum
270: !   of a Jacobi preconditioner and an SOR preconditioner.  Here we create
271: !   those two PC contexts plus a work vector; they are then used within
272: !   the routine SampleShellPCApply().
273: !
274: subroutine SampleShellPCSetUp(pmat,x,ierr)
276: implicit none
278: #include "include/finclude/petsc.h"
279: #include "include/finclude/petscvec.h"
280: #include "include/finclude/petscmat.h"
282: Vec x
283: Mat pmat
284: integer ierr
286: ! Common block to store data for user-provided preconditioner
287: common /mypcs/ jacobi,sor,work
288: PC jacobi,sor
289: Vec work
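!  First sub-preconditioner: a standard PETSc Jacobi PC built on the
!  preconditioner matrix pmat.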
291: call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
292: call PCSetType(jacobi,PCJACOBI,ierr)
293: call PCSetVector(jacobi,x,ierr)
294: call PCSetOperators(jacobi,pmat,pmat,DIFFERENT_NONZERO_PATTERN, &
295: & ierr)
296: call PCSetUp(jacobi,ierr)
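!  Second sub-preconditioner: a PETSc SOR PC built on the same matrix.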
298: call PCCreate(PETSC_COMM_WORLD,sor,ierr)
299: call PCSetType(sor,PCSOR,ierr)
300: call PCSetVector(sor,x,ierr)
301: call PCSetOperators(sor,pmat,pmat,DIFFERENT_NONZERO_PATTERN, &
302: & ierr)
303: ! call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
304: call PCSetUp(sor,ierr)
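!  Work vector used by SampleShellPCApply() to hold the SOR result
!  before it is added to the Jacobi result.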
306: call VecDuplicate(x,work,ierr)
308: end
310: ! -------------------------------------------------------------------
311: !
312: ! SampleShellPCApply - This routine demonstrates the use of a
313: ! user-provided preconditioner.
314: !
315: ! Input Parameters:
316: ! dummy - optional user-defined context, not used here
317: ! x - input vector
318: !
319: ! Output Parameters:
320: ! y - preconditioned vector
321: ! ierr - error code (nonzero if error has been detected)
322: !
323: ! Notes:
324: ! This code implements the Jacobi preconditioner plus the
325: ! SOR preconditioner
326: !
327: ! YOU CAN GET THE EXACT SAME EFFECT WITH THE PCCOMPOSITE preconditioner using
328: ! mpirun -np 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
329: !
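!  In other words, this routine returns y = jacobi(x) + sor(x), an
!  additive combination of the two sub-preconditioners created in
!  SampleShellPCSetUp().
!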
330: subroutine SampleShellPCApply(dummy,x,y,ierr)
332: implicit none
334: #include "include/finclude/petsc.h"
335: #include "include/finclude/petscvec.h"
336: #include "include/finclude/petscpc.h"
338: Vec x,y
339: integer dummy,ierr
340: PetscScalar one
341:
342: ! Common block to store data for user-provided preconditioner
343: common /mypcs/ jacobi,sor,work
344: PC jacobi,sor
345: Vec work
347: one = 1.0
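!  y    <- Jacobi preconditioner applied to x
!  work <- SOR preconditioner applied to x
!  y    <- y + work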
348: call PCApply(jacobi,x,y,PC_LEFT,ierr)
349: call PCApply(sor,x,work,PC_LEFT,ierr)
350: call VecAXPY(one,work,y,ierr)
352: end