Actual source code: ex41.c
petsc-3.4.2 2013-07-02
static char help[] = "Tests sequential and parallel MatMatMatMult() and MatPtAP(). Modified from ex96.c \n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";
/*
  Example of usage: mpiexec -n 3 ./ex41 -Mx 10 -My 10 -Mz 10
*/
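/*
  Omitting -Mz (or giving -Mz 0) selects the 2-D variant, e.g.
      mpiexec -n 3 ./ex41 -Mx 10 -My 10
*/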
#include <petscdmda.h>
#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/mat/impls/aij/mpi/mpiaij.h>
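/* The private AIJ headers are needed because the parallel branch below
   fills the matrix by writing directly into the internal storage of the
   diagonal and off-diagonal blocks of a MATMPIAIJ matrix. */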
/* User-defined application contexts */
typedef struct {
  PetscInt mx,my,mz;      /* number of grid points in the x, y and z directions */
  Vec      localX,localF; /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;         /* global vectors */
  Mat      J;             /* Jacobian on grid */
} GridCtx;
typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii; /* interpolation from coarse to fine */
} AppCtx;
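/* Only the grid dimensions, the DMDAs and the refinement ratio are exercised
   in this test; the vector, Jacobian and interpolation members are carried
   over from ex96.c and remain unused here. */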
#define COARSE_LEVEL 0
#define FINE_LEVEL   1
/*
  Mm_ratio - ratio of grid lines between fine and coarse grids.
*/
int main(int argc,char **argv)
{
  AppCtx      user;
  PetscMPIInt size,rank;
  PetscInt    m,n,M,N,i,nrows;
  PetscScalar one = 1.0;
  PetscReal   fill = 2.0;
  Mat         A,P,R,C,PtAP;
  PetscScalar *array;
  PetscRandom rdm;
  PetscBool   Test_3D = PETSC_FALSE,flg;
  PetscInitialize(&argc,&argv,NULL,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  /* Get size of fine grids and coarse grids */
  user.ratio     = 2;
  user.coarse.mx = 2; user.coarse.my = 2; user.coarse.mz = 0;

  PetscOptionsGetInt(NULL,"-Mx",&user.coarse.mx,NULL);
  PetscOptionsGetInt(NULL,"-My",&user.coarse.my,NULL);
  PetscOptionsGetInt(NULL,"-Mz",&user.coarse.mz,NULL);
  PetscOptionsGetInt(NULL,"-ratio",&user.ratio,NULL);
  if (user.coarse.mz) Test_3D = PETSC_TRUE;
  user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
  user.fine.my = user.ratio*(user.coarse.my-1)+1;
  user.fine.mz = user.ratio*(user.coarse.mz-1)+1;
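  /* With mx points there are mx-1 intervals; refining each interval 'ratio'
     times gives ratio*(mx-1)+1 fine grid points, so every coarse grid point
     coincides with a fine grid point. In the 2-D case (coarse.mz == 0)
     fine.mz is computed but never used. */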
  if (!rank) {
    if (!Test_3D) {
      PetscPrintf(PETSC_COMM_SELF,"coarse grids: %D %D; fine grids: %D %D\n",user.coarse.mx,user.coarse.my,user.fine.mx,user.fine.my);
    } else {
      PetscPrintf(PETSC_COMM_SELF,"coarse grids: %D %D %D; fine grids: %D %D %D\n",user.coarse.mx,user.coarse.my,user.coarse.mz,user.fine.mx,user.fine.my,user.fine.mz);
    }
  }
  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,
                 user.fine.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.fine.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                 user.fine.mx,user.fine.my,user.fine.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                 1,1,NULL,NULL,NULL,&user.fine.da);
  }
  /* Create and set A on the fine grid */
  DMCreateMatrix(user.fine.da,MATAIJ,&A);
  MatGetLocalSize(A,&m,&n);
  MatGetSize(A,&M,&N);
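  /* DMCreateMatrix returns A preallocated with the nonzero pattern implied
     by the DMDA star stencil; all stored entries are initially zero. */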
  /* Set every stored entry of A to one (the random context below could be
     used to fill A with random values instead) */
  PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
  PetscRandomSetFromOptions(rdm);
  if (size == 1) {
    const PetscInt *ia,*ja;
    MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(A,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(A,&array);
    }
    MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  } else {
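    /* In parallel, reach into the MATMPIAIJ internals: aij->A is the local
       diagonal block and aij->B the off-diagonal block, each stored as a
       sequential AIJ matrix whose values array (a->a, b->a) is bounded by
       the row-pointer arrays (a->i, b->i). */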
    Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data;
    Mat_SeqAIJ *a   = (Mat_SeqAIJ*)(aij->A)->data,*b = (Mat_SeqAIJ*)(aij->B)->data;
    /* A_part */
    for (i=0; i<a->i[m]; i++) a->a[i] = one;
    /* B_part */
    for (i=0; i<b->i[m]; i++) b->a[i] = one;
  }
  /* if (!rank) printf("A:\n"); */
  /* MatView(A, PETSC_VIEWER_STDOUT_WORLD); */
  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,
                 user.coarse.my,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&user.coarse.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,
                 user.coarse.mx,user.coarse.my,user.coarse.mz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
                 1,1,NULL,NULL,NULL,&user.coarse.da);
  }
  /* Create interpolation between the fine and coarse grids */
  DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL);
  /* if (!rank) printf("P:\n"); */
  /* MatView(P, PETSC_VIEWER_STDOUT_WORLD); */
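  /* P is rectangular, with one row per fine-grid point and one column per
     coarse-grid point; applying it to a coarse-grid vector interpolates the
     values onto the fine grid. */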
  /* Get R = P^T */
  MatTranspose(P,MAT_INITIAL_MATRIX,&R);
  /* if (!rank) printf("R:\n"); */
  /* MatView(R, PETSC_VIEWER_STDOUT_WORLD); */
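  /* R = P^T acts as the restriction operator. Forming it explicitly lets the
     Galerkin coarse-grid operator be computed with the general triple product
     MatMatMatMult(R,A,P) and compared against MatPtAP below. */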
  /* C = R*A*P */
  MatMatMatMult(R,A,P,MAT_INITIAL_MATRIX,fill,&C);
  MatMatMatMult(R,A,P,MAT_REUSE_MATRIX,fill,&C);
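  /* The first call (MAT_INITIAL_MATRIX) runs both the symbolic and numeric
     phases and creates C; the second call (MAT_REUSE_MATRIX) reuses the
     symbolic data and repeats only the numeric phase, the pattern used when
     A changes but the grids do not. 'fill' is the caller's estimate of the
     fill ratio, used to size the symbolic product. */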
  /* Test C == PtAP */
  MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&PtAP);
  MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&PtAP);
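  /* MatPtAP forms P^T*A*P in one fused operation, without an explicit
     transpose of P; it should agree entrywise with the explicit product C */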
  MatEqual(C,PtAP,&flg);
  if (!flg) PetscPrintf(PETSC_COMM_WORLD,"RAP != PtAP\n");
  MatDestroy(&PtAP);
  /* Clean up */
  MatDestroy(&A);
  PetscRandomDestroy(&rdm);
  DMDestroy(&user.fine.da);
  DMDestroy(&user.coarse.da);
  MatDestroy(&P);
  MatDestroy(&R);
  MatDestroy(&C);
  PetscFinalize();
  return 0;
}