Actual source code: ex40.c
petsc-3.12.1 2019-10-22
static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";

/*T
   Concepts: KSP^basic parallel example;
   Concepts: KSP^Laplacian, 2d
   Concepts: Laplacian, 2d
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;    /* approx solution, RHS, exact solution */
  Mat            A;        /* linear system matrix */
  KSP            ksp;      /* linear solver context */
  PetscRandom    rctx;     /* random number generator context */
  PetscReal      norm;     /* norm of solution error */
  PetscInt       i,j,Ii,J,m = 8,n = 7,its;
  PetscErrorCode ierr;     /* error code returned by PETSc routines */
  PetscBool      flg = PETSC_FALSE;
  PetscScalar    v;
  PetscMPIInt    rank;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details;
     a sketch follows this comment.
  */
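  /*
     A sketch of such preallocation, not used in this example: with the
     default sparse AIJ format (rather than the dense MATELEMENTAL format
     selected below), the 5-point stencil assembled here has at most 5
     nonzeros per row, and could be preallocated with

       MatSeqAIJSetPreallocation(A,5,NULL);
       MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  */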
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
  ierr = MatSetType(A,MATELEMENTAL);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatSetUp(A);CHKERRQ(ierr);
  /*
     Assemble the 2D five-point Laplacian: all entries are generated on
     rank 0; PETSc moves them to the owning processes during assembly.
  */
  if (rank == 0) {
    PetscInt M,N;
    ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
    for (Ii=0; Ii<M; Ii++) {
      v = -1.0; i = Ii/n; j = Ii - i*n;
      if (i>0)   {J = Ii - n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
      if (i<m-1) {J = Ii + n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
      if (j>0)   {J = Ii - 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
      if (j<n-1) {J = Ii + 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
      v = 4.0; ierr = MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); */

  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly. PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below); see the sketch after this comment.
  */
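  /*
     A sketch of that alternative, assuming nlocal holds a locally
     computed length on each process (nlocal is illustrative and not
     defined in this example):

       VecSetSizes(u,nlocal,PETSC_DECIDE);
  */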
  ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
  ierr = VecSetSizes(u,PETSC_DECIDE,m*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(u);CHKERRQ(ierr);
  ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&x);CHKERRQ(ierr);

  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution vector with all elements
     equal to 1.0; alternatively, the runtime option
     -random_exact_sol forms a solution vector with random components.
  */
  ierr = PetscOptionsGetBool(NULL,NULL,"-random_exact_sol",&flg,NULL);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rctx);CHKERRQ(ierr);
    ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
    ierr = VecSetRandom(u,rctx);CHKERRQ(ierr);
    ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr);
  } else {
    ierr = VecSet(u,1.0);CHKERRQ(ierr);
  }
  ierr = MatMult(A,u,b);CHKERRQ(ierr);

  /*
     View the exact solution vector if desired
  */
  flg  = PETSC_FALSE;
  ierr = PetscOptionsGetBool(NULL,NULL,"-view_exact_sol",&flg,NULL);CHKERRQ(ierr);
  if (flg) {ierr = VecView(u,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options; a sketch follows this comment.
     - The following KSPSetTolerances() statement is optional; all of
       these parameters could alternatively be specified at runtime via
       KSPSetFromOptions(). All of these defaults can be overridden at
       runtime, as indicated below.
  */
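  /*
     A sketch of such direct configuration, not performed by this
     example (PCJACOBI is just one possible choice of preconditioner):

       PC pc;
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCJACOBI);
  */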
  ierr = KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),1.e-50,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);

  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

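  /*
     A typical invocation exercising such runtime options might look
     like this (a sketch; the solver and preconditioner names are
     illustrative, not required by this example):

       mpiexec -n 6 ./ex40 -ksp_type gmres -ksp_rtol 1.e-7 -ksp_monitor -pc_type none
  */
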
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error: overwrite x with x - u and take its 2-norm
  */
  ierr = VecAXPY(x,-1.0,u);CHKERRQ(ierr);
  ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
  ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);

  /*
     Print convergence information. PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);CHKERRQ(ierr);
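  /*
     A sketch of the PetscFPrintf() alternative mentioned above (fd is
     an assumed FILE* opened elsewhere, e.g. with PetscFOpen(); it is
     not part of this example):

       PetscFPrintf(PETSC_COMM_WORLD,fd,"Norm of error %g iterations %D\n",(double)norm,its);
  */
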
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&u);CHKERRQ(ierr); ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr); ierr = MatDestroy(&A);CHKERRQ(ierr);

  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 6
      args: -pc_type none
      requires: elemental

   test:
      suffix: 2
      nsize: 6
      args: -pc_type lu -pc_factor_mat_solver_type elemental
      requires: elemental

TEST*/