Actual source code: ex4.c

/*$Id: ex4.c,v 1.53 2001/08/07 03:04:00 balay Exp $*/

static char help[] = "Uses a different preconditioner matrix and linear system matrix in the KSP solvers.\n\
Note that different storage formats\n\
can be used for the different matrices.\n\n";

/*T
   Concepts: KSP^different matrices for linear system and preconditioner;
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  KSP         ksp;               /* linear solver context */
  Mat         A,B;               /* linear system matrix, preconditioning matrix */
  PetscRandom rctx;              /* random number generator context */
  Vec         x,b,u;             /* approx solution, RHS, exact solution */
  Vec         tmp;               /* work vector */
  PetscScalar v,one = 1.0,scale = 0.0;
  int         i,j,m = 15,n = 17,I,J,Istart,Iend;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetScalar(PETSC_NULL,"-scale",&scale,PETSC_NULL);
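
  /*
     An illustrative invocation (executable name and MPI launcher are
     installation-dependent, not prescribed by this example):
         mpirun -np 2 ex4 -m 20 -n 20 -scale 0.1 -ksp_type gmres -pc_type jacobi
  */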

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.  Also, create a different
         preconditioner matrix.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create the linear system matrix (A).
      - Here we use a block diagonal matrix format (MATBDIAG) and
        specify only the global size.  The parallel partitioning of
        the matrix will be determined at runtime by PETSc.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetType(A,MATBDIAG);
  MatSeqBDiagSetPreallocation(A,0,1,PETSC_NULL,PETSC_NULL);
  MatMPIBDiagSetPreallocation(A,0,1,PETSC_NULL,PETSC_NULL);
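
  /*
     Calling both the Seq and MPI preallocation variants is the usual
     pattern: only the one matching A's actual type takes effect.  As we
     read this 2.x-era API, passing 0 diagonals with PETSC_NULL lists
     lets PETSc allocate diagonals as entries are inserted, and the
     third argument (1) is the block size.
  */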

  /*
     Create a different preconditioner matrix (B).  This is usually
     done to form a cheaper (or sparser) preconditioner matrix
     compared to the linear system matrix.
      - Here we use MatCreate() followed by MatSetFromOptions(),
        so that the matrix format and parallel partitioning will be
        determined at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&B);
  MatSetFromOptions(B);
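
  /*
     For example, running with -mat_type aij (format names depend on the
     PETSc version) selects B's storage format on the command line
     without recompiling.
  */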

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);

  /*
     Set entries within the two matrices (see the note after the loop)
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0) {
      J = I - n;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-1) {
      J = I + n;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (j>0) {
      J = I - 1;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (j<n-1) {
      J = I + 1;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    v = 5.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
    v = 3.0; MatSetValues(B,1,&I,1,&I,&v,INSERT_VALUES);
  }
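
  /*
     At this point A holds the standard 5-point stencil on the m x n grid
     (off-diagonal entries -1.0, diagonal 5.0), while B holds only the
     north/south couplings with a weaker diagonal (3.0); B is thus a
     sparser approximation to A.
  */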

  /*
     Assemble the preconditioner matrix (B), using the 2-step process
       MatAssemblyBegin(), MatAssemblyEnd()
     Note that computations can be done while messages are in transit
     by placing code between these two statements; here we insert
     additional entries into A while B's assembly proceeds.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  for (I=Istart; I<Iend; I++) {
    v = -0.5; i = I/n;
    if (i>1) {
      J = I - (n+1); MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-2) {
      J = I + n + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
  }
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
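
  /*
     The loop above added two more diagonals to A (offsets -(n+1) and
     +(n+1), entries -0.5), so A's final stencil has seven nonzero
     diagonals while B keeps only three.
  */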

  /*
     Assemble the linear system matrix (A)
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);

  /*
     Set the exact solution vector: u = 1 + scale*(random noise).
     With the default scale of 0.0, u is simply the vector of all ones.
  */
  VecSet(&one,u);
  VecDuplicate(u,&tmp);
  PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);
  VecSetRandom(rctx,tmp);
  PetscRandomDestroy(rctx);
  VecAXPY(&scale,tmp,u);
  VecDestroy(tmp);

  /*
     Compute the right-hand-side vector b = A*u, so that the exact
     solution of the linear system is known.
  */
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Note that we use different matrices to define the
     linear system and to precondition it.
  */
  KSPSetOperators(ksp,A,B,DIFFERENT_NONZERO_PATTERN);
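
  /*
     The final flag describes how the preconditioner matrix's nonzero
     structure relates to that of previous solves; DIFFERENT_NONZERO_PATTERN
     makes no reuse assumptions and is the conservative choice for a
     one-shot solve like this one.
  */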

  /*
     Set runtime options (e.g., -ksp_type <type> -pc_type <type>)
  */
  KSPSetFromOptions(ksp);
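
  /*
     Other commonly useful options in PETSc releases of this vintage
     include -ksp_monitor (print the residual norm at each iteration)
     and -ksp_rtol <tol> (set the relative convergence tolerance).
  */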

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSetRhs(ksp,b);
  KSPSetSolution(ksp,x);
  KSPSolve(ksp);
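
  /*
     Optional check, a minimal sketch assuming the same-era VecAXPY()
     and VecNorm() calling sequences used above: since the exact
     solution u is known, we can compute the error norm ||x - u||_2
     of the computed solution.
  */
  {
    PetscScalar neg_one = -1.0;
    PetscReal   norm;
    VecAXPY(&neg_one,u,x);               /* x <- x - u */
    VecNorm(x,NORM_2,&norm);             /* norm = ||x - u||_2 */
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g\n",(double)norm);
  }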

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(ksp); VecDestroy(u);
  MatDestroy(B);   VecDestroy(x);
  MatDestroy(A);   VecDestroy(b);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}