Actual source code: ex8.c

/*$Id: ex8.c,v 1.50 2001/08/07 03:04:00 balay Exp $*/

static char help[] = "Illustrates use of the ASM (Additive Schwarz Method)\n\
preconditioner for solving a linear system in parallel with KSP.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of KSP
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/
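
/*
   Illustrative invocation (the "mpirun -np <p>" launcher syntax is an
   assumption of this note; the options are those defined in this file):

       mpirun -np 1 ex8 -user_set_subdomains -M 3 -N 2 -overlap 2
*/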

/*T
   Concepts: KSP^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  Vec          x,b,u;                   /* approx solution, RHS, exact solution */
  Mat          A;                       /* linear system matrix */
  KSP          ksp;                     /* linear solver context */
  PC           pc;                      /* PC context */
  IS           *is;                     /* array of index sets that define the subdomains */
  int          overlap = 1;             /* width of subdomain overlap */
  int          Nsub;                    /* number of subdomains */
  int          m = 15,n = 17;           /* mesh dimensions in x- and y-directions */
  int          M = 2,N = 1;             /* number of subdomains in x- and y-directions */
  int          i,j,I,J,Istart,Iend,size;
  PetscTruth   flg;
  PetscTruth   user_subdomains;         /* flag - 1 indicates user-defined subdomains */
  PetscScalar  v, one = 1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Assemble the matrix for the five-point stencil, YET AGAIN
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create vectors.  Set the exact solution u to all ones, then compute
     the corresponding right-hand side b = A*u.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(&one,u);
  MatMult(A,u,b);

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set the default preconditioner for this program to be ASM
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition via
         PCASMSetOverlap(pc,overlap);
     Could instead use the option -pc_asm_overlap <ovl>

     Set the total number of blocks via -pc_asm_blocks <blks>
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!
  */
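
  /*
     For instance (illustrative run, using only the options named above):
         ex8 -pc_asm_blocks 4 -pc_asm_overlap 2
     tries four overlapping blocks on a single process.
  */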

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry; a sketch follows
     this comment.

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
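
  /*
     A minimal sketch of such a custom routine for one subdomain, assuming
     the row-major grid numbering used above (the strip shape and the names
     strip, nidx, idx are hypothetical; ISCreateGeneral() copies the index
     array, so it can be freed afterwards):

         IS  strip;
         int k,nidx = 3*n,*idx;
         PetscMalloc(nidx*sizeof(int),&idx);
         for (k=0; k<nidx; k++) idx[k] = k;
         ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,&strip);
         PetscFree(idx);
  */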

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(1,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);
    PCASMSetLocalSubdomains(pc,Nsub,is);
  }

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.

     - Use PCASMGetSubKSP() to extract the array of KSP contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */
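
  /*
     For instance (illustrative command line, combining the options above):
         ex8 -pc_asm_blocks 4 -sub_ksp_type gmres -sub_pc_type ilu -sub_ksp_rtol 1.e-4
     sets GMRES with ILU on every block through runtime options alone.
  */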

  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);
  if (flg) {
    KSP        *subksp;        /* array of KSP contexts for local subblocks */
    int        nlocal,first;   /* number of local subblocks, first local subblock */
    PC         subpc;          /* PC context for subblock */
    PetscTruth isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);

    /*
       Flag an error if the PC type was changed away from ASM by the
       runtime options (the code below assumes an ASM preconditioner).
    */
    PetscTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) {
      SETERRQ(1,"Cannot change the PC type when manually setting the subdomain solvers");
    }
    /*
       Call KSPSetUp() to set up the ASM preconditioner's data structures
       (including creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCASMGetSubKSP().
    */
    KSPSetRhs(ksp,b);
    KSPSetSolution(ksp,x);
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCASMGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp[i],KSPGMRES);
      KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
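
    /*
       The blocks need not all be treated alike.  For example (hypothetical
       variation, in the spirit of ex7.c), inside the loop above one could
       give the first local block a direct solver instead:

           if (i == 0) {
             PCSetType(subpc,PCLU);
             KSPSetType(subksp[i],KSPPREONLY);
           }
    */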
  } else {
    /*
       Set runtime options
    */
    KSPSetFromOptions(ksp);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  KSPSetRhs(ksp,b);
  KSPSetSolution(ksp,x);
  KSPSolve(ksp);
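
  /*
     One could verify the result here by comparing x with the exact
     solution u (a sketch using the same 2.x-era calling sequences as
     the rest of this file; the variable names are hypothetical):

         PetscReal   norm;
         PetscScalar neg_one = -1.0;
         VecAXPY(&neg_one,u,x);
         VecNorm(x,NORM_2,&norm);
         PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g\n",norm);
  */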

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(is[i]);
    }
    PetscFree(is);
  }
  KSPDestroy(ksp);
  VecDestroy(u);
  VecDestroy(x);
  VecDestroy(b);
  MatDestroy(A);
  PetscFinalize();
  return 0;
}