Actual source code: ex26.c

static char help[] = "Solves the Laplacian with multigrid, bad way.\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";

/*  Modified from ~src/ksp/examples/tests/ex19.c. Used for testing the ML 6.2 interface.

    This problem is modeled by the partial differential equation

            -Laplacian u = g,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a linear
    system of equations.
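
    Schematically, each interior row of the assembled system is the scaled
    5-point stencil (a sketch of what FormJacobian_Grid below inserts):

       2*(hy/hx + hx/hy)*u(i,j) - (hy/hx)*(u(i-1,j) + u(i+1,j))
                                - (hx/hy)*(u(i,j-1) + u(i,j+1)) = rhs(i,j)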

    Usage: ./ex26 -ksp_monitor_short -pc_type ml
           -mg_coarse_ksp_max_it 10
           -mg_levels_1_ksp_max_it 10 -mg_levels_2_ksp_max_it 10
           -mg_fine_ksp_max_it 10
*/

#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts */
typedef struct {
  PetscInt mx,my;              /* number of grid points in x and y direction */
  Vec      localX,localF;      /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;              /* global vectors */
  Mat      J;                  /* Jacobian on grid */
  Mat      A,P,R;
  KSP      ksp;
} GridCtx;
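/* Note: r, J, A, P, R, and ksp above are never referenced in this example;
   they are presumably leftovers from the ex19 source this code was modified from. */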
extern int FormJacobian_Grid(GridCtx*,Mat*);

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       its,n,Nx=PETSC_DECIDE,Ny=PETSC_DECIDE,nlocal;
  PetscMPIInt    size;
  PetscScalar    one = 1.0;
  PetscInt       mx,my;
  Mat            A;
  GridCtx        fine_ctx;
  KSP            ksp;
  PetscBool      flg;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  /* set up discretization matrix for fine grid */
  fine_ctx.mx = 9; fine_ctx.my = 9;
  PetscOptionsGetInt(NULL,NULL,"-mx",&mx,&flg);
  if (flg) fine_ctx.mx = mx;
  PetscOptionsGetInt(NULL,NULL,"-my",&my,&flg);
  if (flg) fine_ctx.my = my;
  PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",fine_ctx.mx,fine_ctx.my);
  n    = fine_ctx.mx*fine_ctx.my;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ny",&Ny,NULL);

  /* Set up distributed array for fine grid */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,fine_ctx.mx,fine_ctx.my,Nx,Ny,1,1,NULL,NULL,&fine_ctx.da);
  DMSetFromOptions(fine_ctx.da);
  DMSetUp(fine_ctx.da);
  DMCreateGlobalVector(fine_ctx.da,&fine_ctx.x);
  VecDuplicate(fine_ctx.x,&fine_ctx.b);
  VecGetLocalSize(fine_ctx.x,&nlocal);
  DMCreateLocalVector(fine_ctx.da,&fine_ctx.localX);
  VecDuplicate(fine_ctx.localX,&fine_ctx.localF);
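  /* preallocate roughly 5 nonzeros per row in the diagonal block and 3 in the
     off-diagonal block of the AIJ matrix, enough for the 5-point stencil */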
  MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&A);
  FormJacobian_Grid(&fine_ctx,&A);

  /* create linear solver */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /* set values for rhs vector */
  VecSet(fine_ctx.b,one);

  /* set options, then solve system */
  KSPSetFromOptions(ksp); /* calls PCSetFromOptions_ML if 'pc_type=ml' */
  KSPSetOperators(ksp,A,A);
  KSPSolve(ksp,fine_ctx.b,fine_ctx.x);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);

  /* free data structures */
  VecDestroy(&fine_ctx.x);
  VecDestroy(&fine_ctx.b);
  DMDestroy(&fine_ctx.da);
  VecDestroy(&fine_ctx.localX);
  VecDestroy(&fine_ctx.localF);
  MatDestroy(&A);
  KSPDestroy(&ksp);

  ierr = PetscFinalize();
  return ierr;
}

int FormJacobian_Grid(GridCtx *grid,Mat *J)
{
  Mat                    jac = *J;
  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
  PetscInt               grow;
  const PetscInt         *ltog;
  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
  ISLocalToGlobalMapping ltogm;

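  /* grid spacings and the ratios hx/hy and hy/hx that scale the stencil */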
  mx    = grid->mx;               my    = grid->my;
  hx    = one/(PetscReal)(mx-1);  hy    = one/(PetscReal)(my-1);
  hxdhy = hx/hy;                  hydhx = hy/hx;

  /* Get ghost points */
  DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
  DMGetLocalToGlobalMapping(grid->da,&ltogm);
  ISLocalToGlobalMappingGetIndices(ltogm,&ltog);
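  /* ltog[] maps local (ghosted) natural indices on this process to global matrix indices */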

  /* Evaluate Jacobian of function */
  for (j=ys; j<ys+ym; j++) {
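    /* local index (into the ghosted array) of the entry just before the first
       owned point of row j; the ++ below steps onto each owned point */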
    row = (j - Ys)*Xm + xs - Xs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
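      /* interior points get the full 5-point stencil; boundary rows get only
         a scaled diagonal entry (halved on edges, quartered at corners) */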
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        v[0] = -hxdhy; col[0] = ltog[row - Xm];
        v[1] = -hydhx; col[1] = ltog[row - 1];
        v[2] = two*(hydhx + hxdhy); col[2] = grow;
        v[3] = -hydhx; col[3] = ltog[row + 1];
        v[4] = -hxdhy; col[4] = ltog[row + Xm];
        MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        value = .5*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      } else {
        value = .25*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      }
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
  return 0;
}

/*TEST

    test:
      args: -ksp_monitor_short

    test:
      suffix: 2
      args: -ksp_monitor_short
      nsize: 3

    test:
      suffix: ml_1
      args: -ksp_monitor_short -pc_type ml -mat_no_inode
      nsize: 3
      requires: ml

    test:
      suffix: ml_2
      args: -ksp_monitor_short -pc_type ml -mat_no_inode -ksp_max_it 3
      nsize: 3
      requires: ml

    test:
      suffix: ml_3
      args: -ksp_monitor_short -pc_type ml -mat_no_inode -pc_mg_type ADDITIVE -ksp_max_it 7
      nsize: 1
      requires: ml

TEST*/