Actual source code: ex43.c (petsc-3.13.4, 2020-08-01)

static char help[] = "Reads a PETSc matrix from a file and solves a linear system \n\
using the aijcusparse class. Input parameters are:\n\
  -f <input_file> : the file to load\n\n";

/*
  This code can be used to test the PETSc interface to other packages.
  Examples of command line options:
    ./ex43 -f DATAFILESPATH/matrices/cfd.2.10 -mat_cusparse_mult_storage_format ell
    ./ex43 -f DATAFILESPATH/matrices/shallow_water1 -ksp_type cg -pc_type icc -mat_cusparse_mult_storage_format ell
*/
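/*
  Note: the matrix and vector types are chosen at run time through
  MatSetFromOptions() and VecSetFromOptions(), so GPU execution is requested with
  options such as -mat_type aijcusparse -vec_type cuda; the storage-format options
  used above and in the tests below (ell, hyb, csr) select the cuSPARSE layout.
*/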

#include <petscksp.h>

int main(int argc,char **argv)
{
  KSP                ksp;
  Mat                A;
  Vec                X,B;
  PetscInt           m, its;
  PetscReal          norm;
  char               file[PETSC_MAX_PATH_LEN];
  PetscBool          flg;
  PetscViewer        fd;
  PetscErrorCode     ierr;

  ierr = PetscInitialize(&argc,&argv,0,help);if (ierr) return ierr;
  /* Load the data from a file */
  ierr = PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER_INPUT,"Must indicate binary file with the -f option");
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);CHKERRQ(ierr);

  /* Build the matrix */
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);

  /* Build the vectors */
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD,&B);CHKERRQ(ierr);
  ierr = VecSetSizes(B,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD,&X);CHKERRQ(ierr);
  ierr = VecSetSizes(X,m,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(B);CHKERRQ(ierr);
  ierr = VecSetFromOptions(X);CHKERRQ(ierr);
  ierr = VecSet(B,1.0);CHKERRQ(ierr);

  /* Build the KSP */
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
  ierr = KSPSetType(ksp,KSPGMRES);CHKERRQ(ierr);
  /* rtol = 1e-12, default atol and dtol, at most 100 iterations */
  ierr = KSPSetTolerances(ksp,1.0e-12,PETSC_DEFAULT,PETSC_DEFAULT,100);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  /* Solve */
  ierr = KSPSolve(ksp,B,X);CHKERRQ(ierr);

  /* Print the residual norm and the number of iterations */
  ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
  ierr = KSPGetResidualNorm(ksp,&norm);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %3D\n",its);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Residual norm %1.5g\n",(double)norm);CHKERRQ(ierr);

  /* Cleanup */
  ierr = VecDestroy(&X);CHKERRQ(ierr);
  ierr = VecDestroy(&B);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
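/*
  A minimal build-and-run sketch, assuming the standard PETSc tutorials makefile and
  a CUDA-enabled PETSc build; the data file path below is illustrative:

    make ex43
    ./ex43 -f $DATAFILESPATH/matrices/cfd.2.10 -mat_type seqaijcusparse -vec_type cuda -ksp_monitor
*/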


/*TEST

   build:
      requires: cuda

   test:
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/cfd.2.10 -mat_type seqaijcusparse -pc_factor_mat_solver_type cusparse -mat_cusparse_storage_format ell -vec_type cuda -pc_type ilu

   test:
      suffix: 2
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -mat_type seqaijcusparse -pc_factor_mat_solver_type cusparse -mat_cusparse_storage_format hyb -vec_type cuda -ksp_type cg -pc_type icc

   test:
      suffix: 3
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/cfd.2.10 -mat_type seqaijcusparse -pc_factor_mat_solver_type cusparse -mat_cusparse_storage_format csr -vec_type cuda -ksp_type bicg -pc_type ilu

   test:
      suffix: 4
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/cfd.2.10 -mat_type seqaijcusparse -pc_factor_mat_solver_type cusparse -mat_cusparse_storage_format csr -vec_type cuda -ksp_type bicg -pc_type ilu -pc_factor_mat_ordering_type nd

   testset:
      nsize: 2
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -mat_type mpiaijcusparse -mat_cusparse_mult_diag_storage_format hyb -pc_type none -vec_type cuda
      test:
        suffix: 5
        args: -use_gpu_aware_mpi 0
      test:
        suffix: 5_gpu_aware_mpi
        output_file: output/ex43_5.out

   test:
      suffix: 6
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -mat_type seqaijcusparse -pc_type none -vec_type cuda

   testset:
      nsize: 2
      requires: cuda datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -mat_type mpiaijcusparse -pc_type none -vec_type cuda

      test:
        suffix: 7
        args: -use_gpu_aware_mpi 0
      test:
        suffix: 7_gpu_aware_mpi
        output_file: output/ex43_7.out

   test:
      suffix: 8
      requires: viennacl datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -mat_type seqaijviennacl -pc_type none -vec_type viennacl
      output_file: output/ex43_6.out

   test:
      suffix: 9
      nsize: 2
      requires: viennacl datafilespath double !complex !define(PETSC_USE_64BIT_INDICES)
      args: -f ${DATAFILESPATH}/matrices/shallow_water1 -mat_type mpiaijviennacl -pc_type none -vec_type viennacl
      output_file: output/ex43_7.out

TEST*/