Actual source code: telescope_coarsedm.c
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petsc/private/dmimpl.h>
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>

#include "../src/ksp/pc/impls/telescope/telescope.h"
static PetscBool  cited = PETSC_FALSE;
static const char citation[] =
  "@inproceedings{MaySananRuppKnepleySmith2016,\n"
  " title = {Extreme-Scale Multigrid Components within PETSc},\n"
  " author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
  " booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
  " series = {PASC '16},\n"
  " isbn = {978-1-4503-4126-4},\n"
  " location = {Lausanne, Switzerland},\n"
  " pages = {5:1--5:12},\n"
  " articleno = {5},\n"
  " numpages = {12},\n"
  " url = {https://doi.acm.org/10.1145/2929908.2929913},\n"
  " doi = {10.1145/2929908.2929913},\n"
  " acmid = {2929913},\n"
  " publisher = {ACM},\n"
  " address = {New York, NY, USA},\n"
  " keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
  " year = {2016}\n"
  "}\n";
typedef struct {
  DM             dm_fine,dm_coarse; /* these DMs should be topologically identical but use different communicators */
  Mat            permutation;
  Vec            xp;
  PetscErrorCode (*fp_dm_field_scatter)(DM,Vec,ScatterMode,DM,Vec);
  PetscErrorCode (*fp_dm_state_scatter)(DM,ScatterMode,DM);
  void           *dmksp_context_determined;
  void           *dmksp_context_user;
} PC_Telescope_CoarseDMCtx;
PetscErrorCode PCTelescopeSetUp_scatters_CoarseDM(PC pc,PC_Telescope sred,PC_Telescope_CoarseDMCtx *ctx)
{
  Vec        xred,yred,xtmp,x,xp;
  VecScatter scatter;
  IS         isin;
  Mat        B;
  PetscInt   m,bs,st,ed;
  MPI_Comm   comm;

  PetscFunctionBegin;
  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetOperators(pc,NULL,&B);
  MatCreateVecs(B,&x,NULL);
  MatGetBlockSize(B,&bs);
  VecDuplicate(x,&xp);
  m    = 0;
  xred = NULL;
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    /* active ranks scatter into the locally owned part of the coarse-DM vector */
    DMCreateGlobalVector(ctx->dm_coarse,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    /* inactive ranks contribute an empty index set */
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,((PetscObject)x)->type_name);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);
  sred->xred    = xred;
  sred->yred    = yred;
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xtmp    = xtmp;
  ctx->xp       = xp;
  VecDestroy(&x);
  PetscFunctionReturn(0);
}
PetscErrorCode PCTelescopeSetUp_CoarseDM(PC pc,PC_Telescope sred)
{
  PC_Telescope_CoarseDMCtx *ctx;
  DM                       dm,dm_coarse = NULL;
  MPI_Comm                 comm;
  PetscBool                has_perm,has_kspcomputeoperators,using_kspcomputeoperators;

  PetscFunctionBegin;
  PetscInfo(pc,"PCTelescope: setup (CoarseDM)\n");
  PetscNew(&ctx);
  sred->dm_ctx = (void*)ctx;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetDM(pc,&dm);
  DMGetCoarseDM(dm,&dm_coarse);
  ctx->dm_fine   = dm;
  ctx->dm_coarse = dm_coarse;

  /* attach coarse dm to ksp on sub communicator */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSetDM(sred->ksp,ctx->dm_coarse);
    if (sred->ignore_kspcomputeoperators) {
      KSPSetDMActive(sred->ksp,PETSC_FALSE);
    }
  }

  /* check if there is a method to provide a permutation */
  has_perm                  = PETSC_FALSE;
  has_kspcomputeoperators   = PETSC_FALSE;
  using_kspcomputeoperators = PETSC_FALSE;

  /* if no permutation is provided, we must rely on KSPSetComputeOperators */
  {
    PetscErrorCode (*dmfine_kspfunc)(KSP,Mat,Mat,void*) = NULL;
    void           *dmfine_kspctx = NULL,*dmcoarse_kspctx = NULL;
    void           *dmfine_appctx = NULL,*dmcoarse_appctx = NULL;
    void           *dmfine_shellctx = NULL,*dmcoarse_shellctx = NULL;

    DMKSPGetComputeOperators(dm,&dmfine_kspfunc,&dmfine_kspctx);
    if (dmfine_kspfunc) { has_kspcomputeoperators = PETSC_TRUE; }

    DMGetApplicationContext(ctx->dm_fine,&dmfine_appctx);
    DMShellGetContext(ctx->dm_fine,&dmfine_shellctx);

    /* need to define dmcoarse_kspctx */
    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {

      PetscInfo(pc,"PCTelescope: KSPSetComputeOperators fetched from parent DM\n");
      if (PCTelescope_isActiveRank(sred)) {
        DMGetApplicationContext(ctx->dm_coarse,&dmcoarse_appctx);
        DMShellGetContext(ctx->dm_coarse,&dmcoarse_shellctx);
      }

      /* Assume that if the fine operator didn't require any context, neither will the coarse */
      if (!dmfine_kspctx) {
        dmcoarse_kspctx = NULL;
        PetscInfo(pc,"PCTelescope: KSPSetComputeOperators using NULL context\n");
      } else {

        PetscInfo(pc,"PCTelescope: KSPSetComputeOperators detected non-NULL context from parent DM\n");
        if (PCTelescope_isActiveRank(sred)) {

          if (dmfine_kspctx == dmfine_appctx) {
            dmcoarse_kspctx = dmcoarse_appctx;
            PetscInfo(pc,"PCTelescope: KSPSetComputeOperators using context from DM->ApplicationContext\n");
            if (!dmcoarse_kspctx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Non-NULL dmfine->kspctx == dmfine->appctx. NULL dmcoarse->appctx found. Likely this is an error");
          } else if (dmfine_kspctx == dmfine_shellctx) {
            dmcoarse_kspctx = dmcoarse_shellctx;
            PetscInfo(pc,"PCTelescope: KSPSetComputeOperators using context from DMShell->Context\n");
            if (!dmcoarse_kspctx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Non-NULL dmfine->kspctx == dmfine.shell->ctx. NULL dmcoarse.shell->ctx found. Likely this is an error");
          }
          ctx->dmksp_context_determined = dmcoarse_kspctx;

          /* look for a user-provided method to fetch the context */
          {
            PetscErrorCode (*fp_get_coarsedm_context)(DM,void**) = NULL;
            void           *dmcoarse_context_user = NULL;
            char           dmcoarse_method[PETSC_MAX_PATH_LEN];

            PetscSNPrintf(dmcoarse_method,sizeof(dmcoarse_method),"PCTelescopeGetCoarseDMKSPContext");
            PetscObjectQueryFunction((PetscObject)ctx->dm_coarse,dmcoarse_method,&fp_get_coarsedm_context);
            if (fp_get_coarsedm_context) {
              PetscInfo(pc,"PCTelescope: Found composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n");
              fp_get_coarsedm_context(ctx->dm_coarse,&dmcoarse_context_user);
              ctx->dmksp_context_user = dmcoarse_context_user;
              dmcoarse_kspctx = dmcoarse_context_user;
            } else {
              PetscInfo(pc,"PCTelescope: Failed to find composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n");
            }
          }

          if (!dmcoarse_kspctx) {
            PetscInfo(pc,"PCTelescope: KSPSetComputeOperators failed to determine the context to use on sub-communicator\n");
            SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot determine which context to use for KSPSetComputeOperators() on sub-communicator");
          }
        }
      }
    }

    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {
      using_kspcomputeoperators = PETSC_TRUE;

      if (PCTelescope_isActiveRank(sred)) {
        /* sub ksp inherits dmksp_func and context provided by user */
        KSPSetComputeOperators(sred->ksp,dmfine_kspfunc,dmcoarse_kspctx);
        /*PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)ctx->dmrepart);*/
        KSPSetDMActive(sred->ksp,PETSC_TRUE);
      }
    }
  }

  if (!has_perm && has_kspcomputeoperators && !using_kspcomputeoperators) SETERRQ(comm,PETSC_ERR_SUP,"No method to permute an operator was found on the parent DM. A method for KSPSetComputeOperators() was provided but it was requested to be ignored. Telescope setup cannot proceed");
  if (!has_perm && !has_kspcomputeoperators) SETERRQ(comm,PETSC_ERR_SUP,"No method to permute an operator was found on the parent DM. No method for KSPSetComputeOperators() was provided. Telescope setup cannot proceed");

  {
    char dmfine_method[PETSC_MAX_PATH_LEN];

    PetscSNPrintf(dmfine_method,sizeof(dmfine_method),"PCTelescopeFieldScatter");
    PetscObjectQueryFunction((PetscObject)ctx->dm_fine,dmfine_method,&ctx->fp_dm_field_scatter);

    PetscSNPrintf(dmfine_method,sizeof(dmfine_method),"PCTelescopeStateScatter");
    PetscObjectQueryFunction((PetscObject)ctx->dm_fine,dmfine_method,&ctx->fp_dm_state_scatter);
  }

  if (ctx->fp_dm_state_scatter) {
    PetscInfo(pc,"PCTelescope: Found composed method PCTelescopeStateScatter from parent DM\n");
  } else {
    PetscInfo(pc,"PCTelescope: Failed to find composed method PCTelescopeStateScatter from parent DM\n");
  }

  if (ctx->fp_dm_field_scatter) {
    PetscInfo(pc,"PCTelescope: Found composed method PCTelescopeFieldScatter from parent DM\n");
  } else {
    PetscInfo(pc,"PCTelescope: Failed to find composed method PCTelescopeFieldScatter from parent DM\n");
    SETERRQ(comm,PETSC_ERR_SUP,"No method to scatter fields between the parent DM and coarse DM was found. Must call PetscObjectComposeFunction() with the parent DM. Telescope setup cannot proceed");
  }

  /*PCTelescopeSetUp_permutation_CoarseDM(pc,sred,ctx);*/
  PCTelescopeSetUp_scatters_CoarseDM(pc,sred,ctx);
  PetscFunctionReturn(0);
}
PetscErrorCode PCApply_Telescope_CoarseDM(PC pc,Vec x,Vec y)
{
  PC_Telescope             sred = (PC_Telescope)pc->data;
  Vec                      xred,yred;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx  = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  xred = sred->xred;
  yred = sred->yred;

  PetscCitationsRegister(citation,&cited);

  /* scatter any auxiliary state, then the field, from the fine DM to the coarse DM */
  if (ctx->fp_dm_state_scatter) {
    ctx->fp_dm_state_scatter(ctx->dm_fine,SCATTER_FORWARD,ctx->dm_coarse);
  }

  ctx->fp_dm_field_scatter(ctx->dm_fine,x,SCATTER_FORWARD,ctx->dm_coarse,xred);

  /* solve on the sub-communicator */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSolve(sred->ksp,xred,yred);
  }

  /* scatter the solution back to the fine DM */
  ctx->fp_dm_field_scatter(ctx->dm_fine,y,SCATTER_REVERSE,ctx->dm_coarse,yred);
  PetscFunctionReturn(0);
}
PetscErrorCode PCTelescopeSubNullSpaceCreate_CoarseDM(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscBool                has_const;
  PetscInt                 k,n = 0;
  const Vec                *vecs;
  Vec                      *sub_vecs = NULL;
  MPI_Comm                 subcomm;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx     = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  subcomm = sred->subcomm;
  MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

  if (PCTelescope_isActiveRank(sred)) {
    /* create new vectors */
    if (n) {
      VecDuplicateVecs(sred->xred,n,&sub_vecs);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    ctx->fp_dm_field_scatter(ctx->dm_fine,vecs[k],SCATTER_FORWARD,ctx->dm_coarse,sub_vecs[k]);
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);
    VecDestroyVecs(n,&sub_vecs);
  }
  PetscFunctionReturn(0);
}
PetscErrorCode PCTelescopeMatNullSpaceCreate_CoarseDM(PC pc,PC_Telescope sred,Mat sub_mat)
{
  Mat                      B;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  PCGetOperators(pc,NULL,&B);

  {
    MatNullSpace nullspace,sub_nullspace;
    MatGetNullSpace(B,&nullspace);
    if (nullspace) {
      PetscInfo(pc,"PCTelescope: generating nullspace (CoarseDM)\n");
      PCTelescopeSubNullSpaceCreate_CoarseDM(pc,sred,nullspace,&sub_nullspace);

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nullspace->remove && !nullspace->rmctx) {
          MatNullSpaceSetFunction(sub_nullspace,nullspace->remove,context);
        } else if (nullspace->remove && nullspace->rmctx) {
          char           dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM,void**) = NULL;

          PetscSNPrintf(dmcoarse_method,sizeof(dmcoarse_method),"PCTelescopeGetCoarseDMNullSpaceUserContext");
          PetscObjectQueryFunction((PetscObject)ctx->dm_coarse,dmcoarse_method,&fp_get_coarsedm_context);
          if (!context) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Propagation of user null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"",dmcoarse_method);
          MatNullSpaceSetFunction(sub_nullspace,nullspace->remove,context);
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        MatSetNullSpace(sub_mat,sub_nullspace);
        MatNullSpaceDestroy(&sub_nullspace);
      }
    }
  }
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    MatGetNearNullSpace(B,&nearnullspace);
    if (nearnullspace) {
      PetscInfo(pc,"PCTelescope: generating near nullspace (CoarseDM)\n");
      PCTelescopeSubNullSpaceCreate_CoarseDM(pc,sred,nearnullspace,&sub_nearnullspace);

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nearnullspace->remove && !nearnullspace->rmctx) {
          MatNullSpaceSetFunction(sub_nearnullspace,nearnullspace->remove,context);
        } else if (nearnullspace->remove && nearnullspace->rmctx) {
          char           dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM,void**) = NULL;

          PetscSNPrintf(dmcoarse_method,sizeof(dmcoarse_method),"PCTelescopeGetCoarseDMNearNullSpaceUserContext");
          PetscObjectQueryFunction((PetscObject)ctx->dm_coarse,dmcoarse_method,&fp_get_coarsedm_context);
          if (!context) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Propagation of user near null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"",dmcoarse_method);
          MatNullSpaceSetFunction(sub_nearnullspace,nearnullspace->remove,context);
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        MatSetNearNullSpace(sub_mat,sub_nearnullspace);
        MatNullSpaceDestroy(&sub_nearnullspace);
      }
    }
  }
  PetscFunctionReturn(0);
}
PetscErrorCode PCReset_Telescope_CoarseDM(PC pc)
{
  PC_Telescope             sred = (PC_Telescope)pc->data;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  ctx->dm_fine     = NULL; /* the reference counts were never incremented, so simply set these to NULL */
  ctx->dm_coarse   = NULL;
  ctx->permutation = NULL; /* this will be fetched from the dm so no need to call destroy */
  VecDestroy(&ctx->xp);
  ctx->fp_dm_field_scatter      = NULL;
  ctx->fp_dm_state_scatter      = NULL;
  ctx->dmksp_context_determined = NULL;
  ctx->dmksp_context_user       = NULL;
  PetscFunctionReturn(0);
}
PetscErrorCode PCApplyRichardson_Telescope_CoarseDM(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol,PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope             sred = (PC_Telescope)pc->data;
  Vec                      yred = NULL;
  PetscBool                default_init_guess_value = PETSC_FALSE;
  PC_Telescope_CoarseDMCtx *ctx;

  PetscFunctionBegin;
  ctx  = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  yred = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope_CoarseDM only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscInfo(pc,"PCTelescopeCoarseDM: Scattering y for non-zero initial guess\n");

    ctx->fp_dm_field_scatter(ctx->dm_fine,y,SCATTER_FORWARD,ctx->dm_coarse,yred);
  }

  if (PCTelescope_isActiveRank(sred)) {
    KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
    if (!zeroguess) {KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);}
  }

  PCApply_Telescope_CoarseDM(pc,x,y);

  if (PCTelescope_isActiveRank(sred)) {
    /* restore the caller's initial-guess setting on the sub KSP */
    KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  PetscFunctionReturn(0);
}
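
Usage note (not part of telescope_coarsedm.c): the setup routine above errors out unless the parent (fine) DM has been composed with a field-scatter callback under the name "PCTelescopeFieldScatter", and it optionally uses a state-scatter callback composed under "PCTelescopeStateScatter". The sketch below shows one way such hooks might be attached. The helpers MyFieldScatter, MyStateScatter, and ComposeTelescopeHooks, with their empty bodies, are hypothetical placeholders; only the composed names and callback signatures follow the queries made in PCTelescopeSetUp_CoarseDM().

/* Illustrative sketch only -- hypothetical user code, not part of the PETSc source above */
#include <petscdm.h>

/* Hypothetical callback matching fp_dm_field_scatter: (DM,Vec,ScatterMode,DM,Vec) */
static PetscErrorCode MyFieldScatter(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse)
{
  /* a real implementation would move field data between the fine and coarse
     representations, e.g. with a VecScatter built during user setup */
  return 0;
}

/* Hypothetical callback matching fp_dm_state_scatter: (DM,ScatterMode,DM) */
static PetscErrorCode MyStateScatter(DM dm_fine,ScatterMode mode,DM dm_coarse)
{
  /* a real implementation would push auxiliary state (coefficients, geometry)
     held by the fine DM onto the coarse DM */
  return 0;
}

/* Compose the hooks on the fine DM so PCTelescopeSetUp_CoarseDM() can find them */
static PetscErrorCode ComposeTelescopeHooks(DM dm_fine)
{
  PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeFieldScatter",MyFieldScatter);
  PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeStateScatter",MyStateScatter);
  return 0;
}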