Actual source code: ex8f.F
!
!  "$Id: ex8f.F,v 1.2 2001/01/15 21:47:06 bsmith Exp $";
!
!   Tests MGSetResidual
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                         Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscmg.h"
#include "include/finclude/petscsys.h"
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver context
!     pc      - preconditioner context (multigrid)
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!

      Mat              A
      Vec              x,b,u
      PC               pc
      integer          n,dim,ierr,istart,iend,i,j,jj,ii
      double precision v,h2
      external         MyResidual
      PetscScalar      pfive
      KSP              ksp
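
!  MyResidual is declared external above so that it can be passed as
!  the user-defined residual routine to MGSetResidual() below.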

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                       Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      pfive = .5d0
      n     = 6
      dim   = n*n
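
!  The linear system is dim x dim, one unknown per node of an n x n grid.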

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,dim,    &
     &               dim,A,ierr)
      call MatSetFromOptions(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      h2 = 1.0/((n+1)*(n+1))
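
!  h2 is the square of the mesh width for a grid with spacing 1/(n+1);
!  it is computed here but the stencil entries below are left unscaled.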

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
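
!  Each row II = i*n + j now holds the standard 5-point Laplacian
!  stencil: 4.0 on the diagonal and -1.0 for each existing
!  north/south/east/west neighbor on the n x n grid.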

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(pfive,u,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      call KSPGetPC(ksp,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
      call MGSetLevels(pc,1,PETSC_NULL_OBJECT,ierr)
      call MGSetResidual(pc,0,MGDefaultResidual,A,ierr)

      call MGSetResidual(pc,0,MyResidual,A,ierr)
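
!  MGSetResidual() registers the routine the multigrid cycle uses to
!  compute the residual on the given level.  The first call above
!  installs the library-provided MGDefaultResidual; the second call
!  overrides it with the user-defined routine MyResidual.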

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
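
!  This test only exercises the setup path; no KSPSolve() call is made
!  before the objects are destroyed below.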

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

 200  continue
      call PetscFinalize(ierr)
      end

!  User-defined residual routine registered with MGSetResidual() above.
!  For this test the body is left empty; the program only exercises
!  registering a routine with the calling sequence (A,b,x,r,ierr).

      subroutine MyResidual(A,b,x,r,ierr)
      Mat     A
      Vec     b,x,r
      integer ierr
      return
      end
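
! -----------------------------------------------------------------------
!  Illustrative sketch, not referenced by the test above: a residual
!  routine that actually computes r = b - A*x.  The routine name is made
!  up for illustration, and it assumes the same scalar-first PETSc 2.x
!  Fortran calling sequences used elsewhere in this file (as with
!  VecSet above); the exact VecAYPX argument order is an assumption for
!  that release.

      subroutine MyResidualCompute(A,b,x,r,ierr)
      Mat         A
      Vec         b,x,r
      integer     ierr
      PetscScalar neg_one

      neg_one = -1.0d0

!     r = A*x
      call MatMult(A,x,r,ierr)
!     r = b - r   (VecAYPX: y = x + alpha*y, with y = r, x = b, alpha = -1)
      call VecAYPX(neg_one,b,r,ierr)

      return
      end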