Actual source code: ex8f.F
!
!  "$Id: ex8f.F,v 1.2 2001/01/15 21:47:06 bsmith Exp $";
!
!  Tests MGSetResidual
!
! -----------------------------------------------------------------------

      program main
      implicit none
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscmg.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscsys.h"
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     sles     - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!
      SLES             sles
      Mat              A
      Vec              x,b,u
      PC               pc
      integer          n,dim,ierr,istart,iend,i,j,jj,ii
      double precision v,h2
      external         MyResidual
      PetscScalar      pfive
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      pfive = .5d0
      n     = 6
      dim   = n*n
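
!  n is the number of grid points in each direction of a regular
!  n x n mesh, so the linear system assembled below has dim = n*n
!  unknowns, one per grid point.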

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,dim,     &
     &               dim,A,ierr)
      call MatSetFromOptions(A,ierr)
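
!  (For example, the runtime option -mat_type selects the matrix
!  storage format from the command line; if none is given PETSc uses
!  its default AIJ format.)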

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      h2 = 1.0/((n+1)*(n+1))
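
!  (h2 is the square of the mesh width h = 1/(n+1); the stencil
!  entries set below are left unscaled by h2 in this test.)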

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
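
!  The loop above assembles the standard 5-point finite-difference
!  Laplacian: global row II corresponds to grid point (i,j) with
!  i = II/n and j = II - i*n; each existing north/south/east/west
!  neighbor contributes -1 off the diagonal, and the diagonal entry
!  is 4.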

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(pfive,u,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call SLESCreate(PETSC_COMM_WORLD,sles,ierr)
      call SLESGetPC(sles,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
      call MGSetLevels(pc,1,PETSC_NULL_OBJECT,ierr)
      call MGSetResidual(pc,0,MGDefaultResidual,A,ierr)

      call MGSetResidual(pc,0,MyResidual,A,ierr)
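
!  MGSetResidual() attaches the routine used to evaluate the residual
!  on a given multigrid level (level 0 here, the only level created by
!  MGSetLevels() above).  The first call installs the library-provided
!  MGDefaultResidual, which computes r = b - A*x; the second call
!  replaces it with the user routine MyResidual, which is what this
!  example tests.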

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN,          &
     &                      ierr)

      call SLESDestroy(sles,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

 200  continue
      call PetscFinalize(ierr)
      end

      subroutine MyResidual(A,b,x,r,ierr)
      Mat     A
      Vec     b,x,r
      integer ierr
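
!  This routine is deliberately a no-op: the example only checks that
!  a user-supplied residual function can be registered with
!  MGSetResidual().  A real implementation would compute the level
!  residual r = b - A*x (e.g. MatMult() followed by a vector update);
!  here we simply return success through ierr.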
      ierr = 0
      return
      end