Actual source code: ex49.c

static char help[] = "Tests SeqSBAIJ factorizations for different block sizes\n\n";

#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;
  Mat            A,A2;
  KSP            ksp;
  PetscRandom    rctx;
  PetscReal      norm;
  PetscInt       i,j,k,l,n = 27,its,bs = 2,Ii,J;
  PetscErrorCode ierr;
  PetscBool      test_hermitian = PETSC_FALSE, convert = PETSC_FALSE;
  PetscScalar    v;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-bs",&bs,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  PetscOptionsGetBool(NULL,NULL,"-herm",&test_hermitian,NULL);
  PetscOptionsGetBool(NULL,NULL,"-conv",&convert,NULL);

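  /* Create an (n*bs) x (n*bs) matrix and preallocate for every supported
     format, so that -mat_type can select SBAIJ, BAIJ, or AIJ at run time;
     preallocation calls for types other than the actual one are ignored */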
  MatCreate(PETSC_COMM_SELF,&A);
  MatSetSizes(A,n*bs,n*bs,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetBlockSize(A,bs);
  MatSetType(A,MATSEQSBAIJ);
  MatSetFromOptions(A);
  MatSeqSBAIJSetPreallocation(A,bs,n,NULL);
  MatSeqBAIJSetPreallocation(A,bs,n,NULL);
  MatSeqAIJSetPreallocation(A,n*bs,NULL);
  MatMPIAIJSetPreallocation(A,n*bs,NULL,n*bs,NULL);

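  /* Randomly select upper-triangular blocks (i,j), j >= i (always including
     the diagonal blocks) and fill them with random entries, mirroring each
     value into the lower triangle (conjugated when -herm is set) and shifting
     diagonal entries by 3*n*bs so the matrix stays strongly diagonally dominant */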
  PetscRandomCreate(PETSC_COMM_SELF,&rctx);
  for (i=0; i<n; i++) {
    for (j=i; j<n; j++) {
      PetscRandomGetValue(rctx,&v);
      if (PetscRealPart(v) < .1 || i == j) {
        for (k=0; k<bs; k++) {
          for (l=0; l<bs; l++) {
            Ii = i*bs + k;
            J = j*bs + l;
            PetscRandomGetValue(rctx,&v);
            if (Ii == J) v = PetscRealPart(v+3*n*bs);
            MatSetValue(A,Ii,J,v,INSERT_VALUES);
            if (test_hermitian) {
              MatSetValue(A,J,Ii,PetscConj(v),INSERT_VALUES);
            } else {
              MatSetValue(A,J,Ii,v,INSERT_VALUES);
            }
          }
        }
      }
    }
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /* With complex numbers:
     - the PETSc Cholesky factorization does not support Hermitian matrices
     - CHOLMOD only supports Hermitian matrices
     - SUPERLU_DIST seems to support both
  */
  if (test_hermitian) {
    MatSetOption(A,MAT_HERMITIAN,PETSC_TRUE);
  }

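  /* Optionally dump an explicit AIJ copy of the assembled operator for
     inspection with -expl_view */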
  {
    Mat M;
    MatComputeOperator(A,MATAIJ,&M);
    MatViewFromOptions(M,NULL,"-expl_view");
    MatDestroy(&M);
  }

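  /* With -conv, an AIJ copy is passed to the KSP as the operator while A
     itself (possibly SBAIJ/BAIJ) is still used to build the preconditioner;
     see KSPSetOperators() below */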
  A2 = NULL;
  if (convert) {
    MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&A2);
  }

  VecCreate(PETSC_COMM_SELF,&u);
  VecSetSizes(u,PETSC_DECIDE,n*bs);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

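  /* Manufacture a right-hand side from the known solution u = 1 so the
     error of the computed solution can be measured afterwards */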
  VecSet(u,1.0);
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_SELF,&ksp);

  /*
     Set operators.
  */
  KSPSetOperators(ksp,A2 ? A2 : A,A);

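  /* Pick up solver options from the command line, e.g. -pc_type cholesky
     and -pc_factor_mat_solver_type cholmod as exercised in the tests below */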
  KSPSetFromOptions(ksp);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error
  */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  if (norm > 100*PETSC_SMALL) {
    PetscPrintf(PETSC_COMM_SELF,"Norm of error %g iterations %D bs %D\n",(double)norm,its,bs);
  }

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u);
  VecDestroy(&x);
  VecDestroy(&b);
  MatDestroy(&A);
  MatDestroy(&A2);
  PetscRandomDestroy(&rctx);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}


/*TEST

   test:
      args: -mat_type {{aij baij sbaij}} -bs {{1 2 3 4 5 6 7 8 9 10 11 12}} -pc_type cholesky -herm 0 -conv {{0 1}}

   test:
      nsize: {{1 4}}
      suffix: cholmod
      requires: suitesparse
      args: -mat_type {{aij sbaij}} -bs 1 -pc_type cholesky -pc_factor_mat_solver_type cholmod -herm -conv {{0 1}}

   test:
      nsize: {{1 4}}
      suffix: superlu_dist
      requires: superlu_dist
      output_file: output/ex49_cholmod.out
      args: -mat_type mpiaij -bs 3 -pc_type cholesky -pc_factor_mat_solver_type superlu_dist -herm {{0 1}} -conv {{0 1}}

   test:
      suffix: mkl_pardiso
      requires: mkl_pardiso
      output_file: output/ex49_1.out
      args: -bs {{1 3}} -pc_type cholesky -pc_factor_mat_solver_type mkl_pardiso

TEST*/