/* Actual source code: ex16.c */
1: static char help[]= "Test PetscSFCreateByMatchingIndices\n\n";
3: #include <petsc.h>
4: #include <petscsf.h>
6: /* Test PetscSFCreateByMatchingIndices.
8: testnum 0:
10: rank : 0 1 2
11: numRootIndices : 3 1 1
12: rootIndices : [1 0 2] [3] [3]
13: rootLocalOffset : 100 200 300
14: layout : [0 1] [2] [3]
15: numLeafIndices : 1 1 2
16: leafIndices : [0] [2] [0 3]
17: leafLocalOffset : 400 500 600
19: would build the following SF:
21: [0] 400 <- (0,101)
22: [1] 500 <- (0,102)
23: [2] 600 <- (0,101)
24: [2] 601 <- (2,300)
26: testnum 1:
28: rank : 0 1 2
29: numRootIndices : 3 1 1
30: rootIndices : [1 0 2] [3] [3]
31: rootLocalOffset : 100 200 300
32: layout : [0 1] [2] [3]
33: numLeafIndices : numRootIndices numRootIndices numRootIndices
34: leafIndices : rootIndices rootIndices rootIndices
35: leafLocalOffset : rootLocalOffset rootLocalOffset rootLocalOffset
37: would build the following SF:
39: [1] 200 <- (2,300)
41: testnum 2:
43: No one claims ownership of global index 1, but no one needs it.
45: rank : 0 1 2
46: numRootIndices : 2 1 1
47: rootIndices : [0 2] [3] [3]
48: rootLocalOffset : 100 200 300
49: layout : [0 1] [2] [3]
50: numLeafIndices : 1 1 2
51: leafIndices : [0] [2] [0 3]
52: leafLocalOffset : 400 500 600
54: would build the following SF:
56: [0] 400 <- (0,100)
57: [1] 500 <- (0,101)
58: [2] 600 <- (0,100)
59: [2] 601 <- (2,300)
61: */
63: int main(int argc, char **argv)
64: {
65: PetscSF sf;
66: PetscLayout layout;
67: PetscInt N, n;
68: PetscInt nA=-1, *A, offsetA=-1;
69: PetscInt nB=-1, *B, offsetB=-1;
70: PetscMPIInt size, rank;
71: PetscInt testnum;
72: PetscErrorCode ierr;
74: PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
75: PetscOptionsGetInt(NULL,NULL, "-testnum", &testnum, NULL);
76: MPI_Comm_size(PETSC_COMM_WORLD,&size);
77: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
78: if (size != 3) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
80: switch (testnum) {
81: case 0:
82: N = 4;
83: n = PETSC_DECIDE;
84: switch (rank) {
85: case 0: nA = 3; offsetA = 100; nB = 1; offsetB = 400; break;
86: case 1: nA = 1; offsetA = 200; nB = 1; offsetB = 500; break;
87: case 2: nA = 1; offsetA = 300; nB = 2; offsetB = 600; break;
88: default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
89: }
90: PetscMalloc1(nA, &A);
91: PetscMalloc1(nB, &B);
92: switch (rank) {
93: case 0:
94: A[0] = 1; A[1] = 0; A[2] = 2;
95: B[0] = 0;
96: break;
97: case 1:
98: A[0] = 3;
99: B[0] = 2;
100: break;
101: case 2:
102: A[0] = 3;
103: B[0] = 0; B[1] = 3;
104: break;
105: default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
106: }
107: break;
108: case 1:
109: N = 4;
110: n = PETSC_DECIDE;
111: switch (rank) {
112: case 0: nA = 3; offsetA = 100; break;
113: case 1: nA = 1; offsetA = 200; break;
114: case 2: nA = 1; offsetA = 300; break;
115: default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
116: }
117: PetscMalloc1(nA, &A);
118: switch (rank) {
119: case 0:
120: A[0] = 1; A[1] = 0; A[2] = 2;
121: break;
122: case 1:
123: A[0] = 3;
124: break;
125: case 2:
126: A[0] = 3;
127: break;
128: default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
129: }
130: nB = nA;
131: B = A;
132: offsetB = offsetA;
133: break;
134: case 2:
135: N = 4;
136: n = PETSC_DECIDE;
137: switch (rank) {
138: case 0: nA = 2; offsetA = 100; nB = 1; offsetB = 400; break;
139: case 1: nA = 1; offsetA = 200; nB = 1; offsetB = 500; break;
140: case 2: nA = 1; offsetA = 300; nB = 2; offsetB = 600; break;
141: default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
142: }
143: PetscMalloc1(nA, &A);
144: PetscMalloc1(nB, &B);
145: switch (rank) {
146: case 0:
147: A[0] = 0; A[1] = 2;
148: B[0] = 0;
149: break;
150: case 1:
151: A[0] = 3;
152: B[0] = 2;
153: break;
154: case 2:
155: A[0] = 3;
156: B[0] = 0; B[1] = 3;
157: break;
158: default: SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run with 3 MPI processes");
159: }
160: break;
161: }
162: PetscLayoutCreate(PETSC_COMM_WORLD, &layout);
163: PetscLayoutSetSize(layout, N);
164: PetscLayoutSetLocalSize(layout, n);
165: PetscLayoutSetBlockSize(layout, 1);
166: PetscSFCreateByMatchingIndices(layout, nA, A, NULL, offsetA, nB, B, NULL, offsetB, NULL, &sf);
167: PetscLayoutDestroy(&layout);
168: PetscFree(A);
169: if (testnum != 1) {PetscFree(B);}
170: PetscObjectSetName((PetscObject)sf, "sf");
171: PetscSFView(sf, NULL);
172: PetscSFDestroy(&sf);
174: PetscFinalize();
175: return ierr;
176: }
/*TEST

  test:
    suffix: 0
    nsize: 3
    args: -testnum 0

  test:
    suffix: 1
    nsize: 3
    args: -testnum 1

  test:
    suffix: 2
    nsize: 3
    args: -testnum 2

TEST*/