Actual source code: ex17f.F
petsc-3.13.4 2020-08-01
!
!
!  Scatters from a parallel vector to a sequential vector. In
!  this case each local vector is as long as the entire parallel vector.
!
#include <petsc/finclude/petscvec.h>
      use petscvec
      implicit none

      PetscErrorCode ierr
      PetscMPIInt size,rank
      PetscInt n,NN,low,high
      PetscInt iglobal,i,ione
      PetscInt first,stride
      PetscScalar value,zero
      Vec x,y
      IS is1,is2
      VecScatter ctx

      n = 5
      zero = 0.0
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
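!     the error code from PetscInitialize is tested directly because
!     PETSc error handling is not available until initialization succeeds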
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif

      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

!  create two vectors: one parallel and one sequential. The sequential
!  one on each processor is as long as the entire parallel one.

      NN = size*n

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,NN,y,ierr)
      call VecCreateSeq(PETSC_COMM_SELF,NN,x,ierr)
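
!     PETSC_DECIDE lets PETSc choose the local size of y (here NN/size = n
!     entries per rank); the sequential x holds all NN entries on every rank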

      call VecSet(x,zero,ierr)
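
!     [low,high) is the half-open range of global indices of y owned by
!     this rank; each rank inserts values only into its own part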
      call VecGetOwnershipRange(y,low,high,ierr)
      ione = 1
      do 10, i=0,n-1
        iglobal = i + low
        value = i + 10*rank
        call VecSetValues(y,ione,iglobal,value,INSERT_VALUES,ierr)
 10   continue
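
!     assembly completes the VecSetValues phase; entries set on another
!     process would be communicated here (none are in this example,
!     since each rank set only entries it owns)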
      call VecAssemblyBegin(y,ierr)
      call VecAssemblyEnd(y,ierr)
!
!  View the parallel vector
!
      call VecView(y,PETSC_VIEWER_STDOUT_WORLD,ierr)

!  create two index sets and the scatter context to move the contents
!  of the parallel vector to each sequential vector. If you want the
!  parallel vector delivered to only one processor, create an is2 of
!  length zero on all processors except the one that is to receive the
!  parallel vector (see the commented sketch below)

      first = 0
      stride = 1
      call ISCreateStride(PETSC_COMM_SELF,NN,first,stride,is1,ierr)
      call ISCreateStride(PETSC_COMM_SELF,NN,first,stride,is2,ierr)
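
!     a sketch of that one-receiver variant (assumptions: izero would be a
!     PetscInt initialized to 0, and is1 would be shortened the same way,
!     since source and destination index sets need equal local lengths):
!
!       if (rank .eq. 0) then
!         call ISCreateStride(PETSC_COMM_SELF,NN,first,stride,is2,ierr)
!       else
!         call ISCreateStride(PETSC_COMM_SELF,izero,first,stride,is2,ierr)
!       endif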

      call VecScatterCreate(y,is2,x,is1,ctx,ierr)
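
!     x was zeroed above, so ADD_VALUES produces the same entries as
!     INSERT_VALUES would; the Begin/End split allows communication to
!     overlap with other work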
      call VecScatterBegin(ctx,y,x,ADD_VALUES,SCATTER_FORWARD,ierr)
      call VecScatterEnd(ctx,y,x,ADD_VALUES,SCATTER_FORWARD,ierr)
      call VecScatterDestroy(ctx,ierr)
!
!  View the sequential vector on the 0th processor
!
      if (rank .eq. 0) then
        call VecView(x,PETSC_VIEWER_STDOUT_SELF,ierr)
      endif
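
!     PetscBarrier blocks until every rank in the object's communicator
!     reaches it; the guard is present because passing objects of
!     different types through one interface needs Fortran type(*) support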
#if defined(PETSC_HAVE_FORTRAN_TYPE_STAR)
      call PetscBarrier(y,ierr)
      call PetscBarrier(is1,ierr)
#endif
      call VecDestroy(x,ierr)
      call VecDestroy(y,ierr)
      call ISDestroy(is1,ierr)
      call ISDestroy(is2,ierr)

      call PetscFinalize(ierr)
      end

!/*TEST
!
!   test:
!      nsize: 3
!      filter: grep -v "MPI processes"
!
!TEST*/