Actual source code: ex41.c
petsc-3.9.3 2018-07-02
static char help[] ="Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";

/*
   Example of usage: mpiexec -n 3 ./ex41 -Mx 10 -My 10 -Mz 10
*/
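
/*
   With the defaults set in main() (coarse grid 4 x 4 x 4, refinement ratio 2) the example runs
   the 3D case on a 7 x 7 x 7 fine grid; passing -Mz 0 selects the 2D code path instead.
*/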

#include <petscdm.h>
#include <petscdmda.h>

/* User-defined application contexts */
typedef struct {
  PetscInt mx,my,mz;      /* number of grid points in x, y and z directions */
  Vec      localX,localF; /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;         /* global vectors */
  Mat      J;             /* Jacobian on grid */
} GridCtx;
typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii;            /* interpolation from coarse to fine */
} AppCtx;

#define COARSE_LEVEL 0
#define FINE_LEVEL   1

/*
   Mm_ratio - ratio of grid lines between fine and coarse grids.
*/
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  AppCtx         user;
  PetscMPIInt    size,rank;
  PetscInt       m,n,M,N,i,nrows;
  PetscScalar    one = 1.0;
  PetscReal      fill=2.0;
  Mat            A,P,R,C,PtAP;
  PetscScalar    *array;
  PetscRandom    rdm;
  PetscBool      Test_3D=PETSC_FALSE,flg;
  const PetscInt *ia,*ja;

  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /* Get sizes of the fine and coarse grids */
  user.ratio     = 2;
  user.coarse.mx = 4; user.coarse.my = 4; user.coarse.mz = 4;

  PetscOptionsGetInt(NULL,NULL,"-Mx",&user.coarse.mx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-My",&user.coarse.my,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Mz",&user.coarse.mz,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ratio",&user.ratio,NULL);
  if (user.coarse.mz) Test_3D = PETSC_TRUE;
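
  /* With refinement ratio r, each coarse-grid interval is split into r fine-grid intervals,
     so a direction with m coarse points has r*(m-1)+1 fine points and every coarse-grid point
     coincides with a fine-grid point. */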
  user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
  user.fine.my = user.ratio*(user.coarse.my-1)+1;
  user.fine.mz = user.ratio*(user.coarse.mz-1)+1;

  if (!rank) {
    if (!Test_3D) {
      PetscPrintf(PETSC_COMM_SELF,"coarse grids: %D %D; fine grids: %D %D\n",user.coarse.mx,user.coarse.my,user.fine.mx,user.fine.my);
    } else {
      PetscPrintf(PETSC_COMM_SELF,"coarse grids: %D %D %D; fine grids: %D %D %D\n",user.coarse.mx,user.coarse.my,user.coarse.mz,user.fine.mx,user.fine.my,user.fine.mz);
    }
  }

  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.fine.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,user.fine.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                  1,1,NULL,NULL,NULL,&user.fine.da);
  }
  DMSetFromOptions(user.fine.da);
  DMSetUp(user.fine.da);

  /* Create A on the fine grid and set its values */
  DMSetMatType(user.fine.da,MATAIJ);
  DMCreateMatrix(user.fine.da,&A);
  MatGetLocalSize(A,&m,&n);
  MatGetSize(A,&M,&N);

  /* Set every stored entry of A to one (could instead be filled with random values) */
  PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
  PetscRandomSetFromOptions(rdm);
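  /* The entries are written directly into the AIJ (CSR) storage: MatGetRowIJ() exposes the
     row-pointer and column-index arrays and MatSeqAIJGetArray() the matching value array, so
     ia[nrows] is the total number of stored nonzeros.  In parallel, an MPIAIJ matrix is kept
     as two SeqAIJ pieces per process, the "diagonal" block AA and the "off-diagonal" block AB
     returned by MatMPIAIJGetSeqAIJ(), so both pieces are filled. */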
  if (size == 1) {
    MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(A,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(A,&array);
    }
    MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  } else {
    Mat AA,AB;
    MatMPIAIJGetSeqAIJ(A,&AA,&AB,NULL);
    MatGetRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(AA,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(AA,&array);
    }
    MatRestoreRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    MatGetRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(AB,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(AB,&array);
    }
    MatRestoreRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  }
  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.coarse.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,user.coarse.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,NULL,&user.coarse.da);
  }
  DMSetFromOptions(user.coarse.da);
  DMSetUp(user.coarse.da);

  /* Create interpolation between the fine and coarse grids */
  DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL);
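  /* P maps coarse-grid vectors to the fine grid: one column per coarse-grid point and one
     row per fine-grid point, so its transpose acts as the restriction operator. */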

  /* Get R = P^T */
  MatTranspose(P,MAT_INITIAL_MATRIX,&R);

  /* C = R*A*P */
  MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,fill,&C);
  MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,fill,&C);
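  /* The first call (MAT_INITIAL_MATRIX) performs the symbolic product and allocates C; the
     second (MAT_REUSE_MATRIX) recomputes the numerical values into the existing structure,
     exercising both code paths.  'fill' is a preallocation hint: the expected ratio of
     nonzeros in the product to the nonzeros of the operands. */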

  /* Test C == PtAP */
  MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&PtAP);
  MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&PtAP);
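  /* MatPtAP() forms P^T*A*P in one fused operation, without building R = P^T explicitly;
     its result must match the explicit triple product computed above. */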
  MatEqual(C,PtAP,&flg);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"Matrices are not equal");
  MatDestroy(&PtAP);

  /* Clean up */
  MatDestroy(&A);
  PetscRandomDestroy(&rdm);
  DMDestroy(&user.fine.da);
  DMDestroy(&user.coarse.da);
  MatDestroy(&P);
  MatDestroy(&R);
  MatDestroy(&C);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:

   test:
      suffix: 2
      nsize: 2

TEST*/
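
/*
   Note: the TEST block above is read by PETSc's test harness.  Each "test:" entry defines one
   run of this example; the first uses the harness defaults (a single MPI rank), while the
   second, labelled "suffix: 2", runs on two ranks ("nsize: 2") and is compared against the
   reference output associated with that suffix.
*/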