Actual source code: ex4.c

static char help[] = "Tests various 2-dimensional DA routines.\n\n";

#include "petscda.h"
#include "petscsys.h"

int main(int argc,char **argv)
{
  PetscMPIInt    rank;
  PetscInt       M = 10,N = 8,m = PETSC_DECIDE;
  PetscInt       s = 2,w = 2,n = PETSC_DECIDE,nloc,l,i,j,kk;
  PetscInt       Xs,Xm,Ys,Ym,iloc,*iglobal,*ltog;
  PetscInt       *lx = PETSC_NULL,*ly = PETSC_NULL;
  PetscTruth     testorder,flg;
  DAPeriodicType wrap = DA_NONPERIODIC;
  DA             da;
  PetscViewer    viewer;
  Vec            local,global;
  PetscScalar    value;
  DAStencilType  st = DA_STENCIL_BOX;
  AO             ao;
  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);
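  /* The draw viewer opens a 400x400 window at screen position (300,0);
     DAView() below draws the parallel decomposition into it. */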
 28: 
  /* Read options */
  PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-s",&s,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-w",&w,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-xwrap",&flg);  if (flg) wrap = DA_XPERIODIC;
  PetscOptionsHasName(PETSC_NULL,"-ywrap",&flg);  if (flg) wrap = DA_YPERIODIC;
  PetscOptionsHasName(PETSC_NULL,"-xywrap",&flg); if (flg) wrap = DA_XYPERIODIC;
  PetscOptionsHasName(PETSC_NULL,"-star",&flg);   if (flg) st = DA_STENCIL_STAR;
  PetscOptionsHasName(PETSC_NULL,"-testorder",&testorder);
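  /* Options absent from the command line leave the defaults set above unchanged. */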
  /*
      Test putting four nodes in x and two in y on each processor, except that
      the last processor in x and in y gets the rest.
  */
  PetscOptionsHasName(PETSC_NULL,"-distribute",&flg);
  if (flg) {
    if (m == PETSC_DECIDE) SETERRQ(1,"Must set -m option with -distribute option");
    PetscMalloc(m*sizeof(PetscInt),&lx);
    for (i=0; i<m-1; i++) { lx[i] = 4; }
    lx[m-1] = M - 4*(m-1);
    if (n == PETSC_DECIDE) SETERRQ(1,"Must set -n option with -distribute option");
    PetscMalloc(n*sizeof(PetscInt),&ly);
    for (i=0; i<n-1; i++) { ly[i] = 2; }
    ly[n-1] = N - 2*(n-1);
  }
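  /* The entries of lx must sum to M and those of ly to N; here the last
     entry absorbs the remainder, so this holds by construction. */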


  /* Create distributed array and get vectors */
  DACreate2d(PETSC_COMM_WORLD,wrap,st,M,N,m,n,w,s,lx,ly,&da);
  if (lx) {
    PetscFree(lx);
    PetscFree(ly);
  }
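  /* The ownership arrays are assumed to be no longer needed once the DA
     has been created, so they are freed immediately. */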

  DAView(da,viewer);
  DACreateGlobalVector(da,&global);
  DACreateLocalVector(da,&local);
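  /* The global vector holds only the locally owned entries (w per node);
     the local vector also has room for the ghost nodes of stencil width s. */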

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global,value);
  DAGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DAGlobalToLocalEnd(da,global,INSERT_VALUES,local);
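  /* The Begin/End split lets computation that does not touch ghost points
     overlap with the communication. */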

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DALocalToGlobal(da,local,INSERT_VALUES,global);
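  /* With INSERT_VALUES each process contributes only the entries it owns;
     the ghost values in the local vector are ignored. */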

  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_NATIVE); /* view in PETSc, not natural, ordering */
    VecView(global,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(PETSC_COMM_WORLD,"\n\n");
  }

  /* Send ghost points to local vectors */
  DAGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DAGlobalToLocalEnd(da,global,INSERT_VALUES,local);
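  /* The global vector was overwritten above, so the ghost values held in
     the local vector are stale; scatter again before viewing it. */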

  PetscOptionsHasName(PETSC_NULL,"-local_print",&flg);
  if (flg) {
    PetscViewer sviewer;
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    PetscSynchronizedFlush(PETSC_COMM_WORLD); /* required for the synchronized output to appear */
  }
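  /* PetscViewerGetSingleton() yields a sequential viewer for this process
     alone, so each process can view its own local vector in turn. */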

  /* Test mappings between the application and PETSc orderings */
  if (testorder) {
    DAGetGhostCorners(da,&Xs,&Ys,PETSC_NULL,&Xm,&Ym,PETSC_NULL);
    DAGetGlobalIndices(da,&nloc,&ltog);
    DAGetAO(da,&ao);
    PetscMalloc(nloc*sizeof(PetscInt),&iglobal);
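    /* nloc counts all local entries including ghost points (w per node);
       ltog maps local ghosted indices to global PETSc indices. */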

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DA ordering of grid points */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          iglobal[kk++] = ltog[iloc+l];
        }
      }
    }
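    /* In the loop above, iloc is the local (ghosted) index of the first of
       the w degrees of freedom stored at grid node (i,j). */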

    /* Map this to the application ordering (which for DAs is just the natural
       ordering that would be used on one processor, numbering most rapidly by x,
       then y) */
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);
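    /* The two mappings are inverses of each other, so iglobal should now
       hold the original PETSc indices again; the loop below verifies this. */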

    /* Verify the mappings */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          if (iglobal[kk] != ltog[iloc+l]) {
            PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
                         rank,j,i,l,ltog[iloc+l],iglobal[kk]);
          }
          kk++;
        }
      }
    }
    PetscFree(iglobal);
  }

  /* Free memory */
  PetscViewerDestroy(viewer);
  VecDestroy(local);
  VecDestroy(global);
  DADestroy(da);

  PetscFinalize();
  return 0;
}
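
Example invocations (a sketch: the mpiexec launcher and the ex4 executable
name are assumptions, while the options are the ones parsed in the code):

   mpiexec -n 4 ./ex4 -M 10 -N 8 -testorder
   mpiexec -n 4 ./ex4 -m 2 -n 2 -distribute -local_print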