Actual source code: ex12.c
petsc-3.12.1 2019-10-22
static char help[] = "Solves a linear system in parallel with KSP,\n\
demonstrating how to register a new preconditioner (PC) type.\n\
Input parameters include:\n\
  -m <mesh_x> : number of mesh points in x-direction\n\
  -n <mesh_y> : number of mesh points in y-direction\n\n";
/*T
   Concepts: KSP^solving a system of linear equations
   Concepts: KSP^Laplacian, 2d
   Concepts: PC^registering preconditioners
   Processors: n
T*/
/*
   Demonstrates registering a new preconditioner (PC) type.

   Use PCRegister() to register a new PC type, whether its code is linked
   into the executable or loaded from a dynamic library.

   Also provide the prototype for your PCCreate_XXX() function. In this
   example we use the PETSc implementation of the Jacobi method,
   PCCreate_Jacobi(), just as an example.

   See the file src/ksp/pc/impls/jacobi/jacobi.c for details on how to
   write a new PC component.

   See the manual page for PCRegister() for details on how to register a method.
*/
/*
   Include "petscksp.h" so that we can use KSP solvers. Note that this file
   automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

PETSC_EXTERN PetscErrorCode PCCreate_Jacobi(PC);
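/*
   A minimal sketch of what a user-defined PC implementation might look like,
   shown only for illustration; PCCreate_Identity and PCApply_Identity are
   hypothetical names, not part of PETSc. A real implementation also sets
   pointers such as setup and destroy; see src/ksp/pc/impls/jacobi/jacobi.c
   for the full pattern. Filling in pc->ops requires the private header
   petsc/private/pcimpl.h.
*/
#include <petsc/private/pcimpl.h>

static PetscErrorCode PCApply_Identity(PC pc,Vec x,Vec y)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCopy(x,y);CHKERRQ(ierr);  /* identity preconditioner: y <- x */
  PetscFunctionReturn(0);
}

PETSC_EXTERN PetscErrorCode PCCreate_Identity(PC pc)
{
  PetscFunctionBegin;
  pc->ops->apply = PCApply_Identity;  /* hook the apply routine into this PC */
  PetscFunctionReturn(0);
}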
int main(int argc,char **args)
{
  Vec            x,b,u;    /* approx solution, RHS, exact solution */
  Mat            A;        /* linear system matrix */
  KSP            ksp;      /* linear solver context */
  PetscReal      norm;     /* norm of solution error */
  PetscErrorCode ierr;     /* error code returned by PETSc routines */
  PetscInt       i,j,Ii,J,Istart,Iend,m = 8,n = 7,its;
  PetscScalar    v,one = 1.0;
  PC             pc;       /* preconditioner context */

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix can be
     determined by PETSc at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatSetUp(A);
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
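  /*
     Rows are numbered in the natural ordering Ii = i*n + j, where (i,j)
     indexes the m x n grid; hence i = Ii/n and j = Ii - i*n below.
  */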
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions(),
        we specify only the vector's global dimension; the parallel
        partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly. PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
  /*
     Set exact solution; then compute right-hand-side vector.
     Use an exact solution with all elements equal to 1.0.
  */
  VecSet(u,one);
  MatMult(A,u,b);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);
  /*
     First register a new PC type with the command PCRegister()
  */
  PCRegister("ourjacobi",PCCreate_Jacobi);

  /*
     Set the PC type to be the new method
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,"ourjacobi");
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
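  /*
     Since "ourjacobi" was registered above, it can also be selected at
     runtime with -pc_type ourjacobi, e.g. (binary name assumed):
         mpiexec -n 2 ./ex12 -pc_type ourjacobi -ksp_monitor
  */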
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  /*
     Print convergence information. PetscPrintf() produces a single
     print statement from all processes that share a communicator.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u); VecDestroy(&x);
  VecDestroy(&b); MatDestroy(&A);
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   test:
      args: -ksp_gmres_cgs_refinement_type refine_always

TEST*/