Actual source code: ex37.c
petsc-3.12.1 2019-10-22
static char help[] = "Test MatGetMultiProcBlock() and MatCreateRedundantMatrix() \n\
Reads a PETSc matrix and vector from a file and solves a linear system.\n\n";

/*
  Example:
    mpiexec -n 4 ./ex37 -f <input_file> -nsubcomm 2 -psubcomm_view -subcomm_type <1 or 2>
*/
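/*
  -subcomm_type selects the PetscSubcommType used below:
    0 = PETSC_SUBCOMM_GENERAL, 1 = PETSC_SUBCOMM_CONTIGUOUS, 2 = PETSC_SUBCOMM_INTERLACED
*/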
#include <petscksp.h>
#include <petscsys.h>
int main(int argc,char **args)
{
  KSP            subksp;
  Mat            A,subA;
  Vec            x,b,u,subb,subx,subu;
  PetscViewer    fd;
  char           file[PETSC_MAX_PATH_LEN];
  PetscBool      flg;
  PetscErrorCode ierr;
  PetscInt       i,m,n,its;
  PetscReal      norm;
  PetscMPIInt    rank,size;
  MPI_Comm       comm,subcomm;
  PetscSubcomm   psubcomm;
  PetscInt       nsubcomm=1,id;
  PetscScalar    *barray,*xarray,*uarray,*array,one=1.0;
  PetscInt       type=1;
  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;

  /* Load the matrix */
  PetscOptionsGetString(NULL,NULL,"-f",file,PETSC_MAX_PATH_LEN,&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,1,"Must indicate binary file with the -f option");
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  /* Load the matrix; then destroy the viewer.*/
  MatCreate(PETSC_COMM_WORLD,&A);
  MatLoad(A,fd);
  PetscViewerDestroy(&fd);

  PetscObjectGetComm((PetscObject)A,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  /* Create rhs vector b */
  MatGetLocalSize(A,&m,NULL);
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,m,PETSC_DECIDE);
  VecSetFromOptions(b);
  VecSet(b,one);

  VecDuplicate(b,&x);
  VecDuplicate(b,&u);
  VecSet(x,0.0);
  /* Test MatGetMultiProcBlock() */
  PetscOptionsGetInt(NULL,NULL,"-nsubcomm",&nsubcomm,NULL);
  PetscOptionsGetInt(NULL,NULL,"-subcomm_type",&type,NULL);

  PetscSubcommCreate(comm,&psubcomm);
  PetscSubcommSetNumber(psubcomm,nsubcomm);

  if (type == PETSC_SUBCOMM_GENERAL) { /* user provides color, subrank and duprank */
    PetscMPIInt color,subrank,duprank,subsize;
    duprank = size-1 - rank;
    subsize = size/nsubcomm;
    if (subsize*nsubcomm != size) SETERRQ2(comm,PETSC_ERR_SUP,"This example requires nsubcomm %D divides size %D",nsubcomm,size);
    color   = duprank/subsize;
    subrank = duprank - color*subsize;
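    /* For example, with size=4 and nsubcomm=2 (subsize=2) the reversed duprank gives:
       rank 0 -> color 1, subrank 1; rank 1 -> color 1, subrank 0;
       rank 2 -> color 0, subrank 1; rank 3 -> color 0, subrank 0 */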
    PetscSubcommSetTypeGeneral(psubcomm,color,subrank);
  } else if (type == PETSC_SUBCOMM_CONTIGUOUS) {
    PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS);
  } else if (type == PETSC_SUBCOMM_INTERLACED) {
    PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_INTERLACED);
  } else SETERRQ1(psubcomm->parent,PETSC_ERR_SUP,"PetscSubcommType %D is not supported yet",type);
  PetscSubcommSetFromOptions(psubcomm);
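  /* PetscSubcommChild() returns the MPI communicator of the subcommunicator this process was assigned to */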
  subcomm = PetscSubcommChild(psubcomm);
  /* Test MatCreateRedundantMatrix() */
  if (size > 1) {
    PetscMPIInt subrank=-1,color=-1;
    MPI_Comm    dcomm;

    if (rank == 0) {
      color = 0; subrank = 0;
    } else if (rank == 1) {
      color = 0; subrank = 1;
    } else {
      color = 1; subrank = 0;
    }
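    /* Hard-coded split: ranks 0 and 1 form the first subcommunicator, all remaining ranks the second */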
    PetscCommDuplicate(PETSC_COMM_WORLD,&dcomm,NULL);
    MPI_Comm_split(dcomm,color,subrank,&subcomm);

    MatCreate(subcomm,&subA);
    MatSetSizes(subA,PETSC_DECIDE,PETSC_DECIDE,10,10);
    MatSetFromOptions(subA);
    MatSetUp(subA);
    MatAssemblyBegin(subA,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(subA,MAT_FINAL_ASSEMBLY);
    MatZeroEntries(subA);

    /* Test MatMult() */
    MatCreateVecs(subA,&subx,&subb);
    VecSet(subx,1.0);
    MatMult(subA,subx,subb);

    VecDestroy(&subx);
    VecDestroy(&subb);
    MatDestroy(&subA);
    PetscCommDestroy(&dcomm);
  }
  /* Create subA */
  MatGetMultiProcBlock(A,subcomm,MAT_INITIAL_MATRIX,&subA);
  MatGetMultiProcBlock(A,subcomm,MAT_REUSE_MATRIX,&subA);
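  /* The second call exercises the MAT_REUSE_MATRIX path, reusing the subA created by the first call */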
  /* Create sub vectors without arrays, then place the local arrays of b, x and u into subb, subx and subu */
  MatGetLocalSize(subA,&m,&n);
  VecCreateMPIWithArray(subcomm,1,m,PETSC_DECIDE,NULL,&subb);
  VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&subx);
  VecCreateMPIWithArray(subcomm,1,n,PETSC_DECIDE,NULL,&subu);

  VecGetArray(b,&barray);
  VecGetArray(x,&xarray);
  VecGetArray(u,&uarray);
  VecPlaceArray(subb,barray);
  VecPlaceArray(subx,xarray);
  VecPlaceArray(subu,uarray);
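  /* subb, subx and subu now share storage with b, x and u, so the subcommunicator solve below
     reads from and writes into the global vectors directly */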
  /* Create linear solvers associated with subA */
  KSPCreate(subcomm,&subksp);
  KSPSetOperators(subksp,subA,subA);
  KSPSetFromOptions(subksp);

  /* Solve sub systems */
  KSPSolve(subksp,subb,subx);
  KSPGetIterationNumber(subksp,&its);

  /* check residual */
  MatMult(subA,subx,subu);
  VecAXPY(subu,-1.0,subb);
  VecNorm(u,NORM_2,&norm);
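  /* Since subu wraps u's local array, this norm is taken over the global vector u and thus
     combines the residuals of all subcommunicator solves */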
  if (norm > 1.e-4 && !rank) {
    PetscPrintf(PETSC_COMM_WORLD,"[%D] Number of iterations = %3D\n",rank,its);
    PetscPrintf(PETSC_COMM_WORLD,"Error: Residual norm of each block |subb - subA*subx| = %g\n",(double)norm);
  }
  VecResetArray(subb);
  VecResetArray(subx);
  VecResetArray(subu);
  PetscOptionsGetInt(NULL,NULL,"-subvec_view",&id,&flg);
  if (flg && rank == id) {
    PetscPrintf(PETSC_COMM_SELF,"[%D] subb:\n", rank);
    VecGetArray(subb,&array);
    for (i=0; i<m; i++) {PetscPrintf(PETSC_COMM_SELF,"%g\n",(double)PetscRealPart(array[i]));}
    VecRestoreArray(subb,&array);
    PetscPrintf(PETSC_COMM_SELF,"[%D] subx:\n", rank);
    VecGetArray(subx,&array);
    for (i=0; i<m; i++) {PetscPrintf(PETSC_COMM_SELF,"%g\n",(double)PetscRealPart(array[i]));}
    VecRestoreArray(subx,&array);
  }
  VecRestoreArray(x,&xarray);
  VecRestoreArray(b,&barray);
  VecRestoreArray(u,&uarray);
  MatDestroy(&subA);
  VecDestroy(&subb);
  VecDestroy(&subx);
  VecDestroy(&subu);
  KSPDestroy(&subksp);
  PetscSubcommDestroy(&psubcomm);
  if (size > 1) {
    MPI_Comm_free(&subcomm);
  }
  MatDestroy(&A); VecDestroy(&b);
  VecDestroy(&u); VecDestroy(&x);
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   test:
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 1
      requires: datafilespath !complex double !define(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

   test:
      suffix: 2
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2
      requires: datafilespath !complex double !define(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

   test:
      suffix: mumps
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -pc_factor_mat_solver_type mumps -pc_type lu
      requires: datafilespath mumps !complex double !define(PETSC_USE_64BIT_INDICES)
      nsize: 4
      output_file: output/ex37.out

   test:
      suffix: 3
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 0
      requires: datafilespath !complex double !define(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

   test:
      suffix: 4
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 1
      requires: datafilespath !complex double !define(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

   test:
      suffix: 5
      nsize: 4
      args: -f ${DATAFILESPATH}/matrices/small -nsubcomm 2 -subcomm_type 2
      requires: datafilespath !complex double !define(PETSC_USE_64BIT_INDICES)
      output_file: output/ex37.out

TEST*/