Actual source code: partition.c
2: #include <petsc/private/matimpl.h>
4: /* Logging support */
5: PetscClassId MAT_PARTITIONING_CLASSID;
7: /*
8: Simplest partitioning, keeps the current partitioning.
9: */
10: static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part,IS *partitioning)
11: {
13: PetscInt m;
14: PetscMPIInt rank,size;
17: MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
18: if (part->n != size) {
19: const char *prefix;
20: PetscObjectGetOptionsPrefix((PetscObject)part,&prefix);
21: SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor",prefix ? prefix : "");
22: }
23: MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
25: MatGetLocalSize(part->adj,&m,NULL);
26: ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
27: return(0);
28: }
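/*
   Worked example of the above: with 3 MPI ranks and m = 4 local rows per rank, the call
   ISCreateStride(comm,4,rank,0,partitioning) produces {0,0,0,0} on rank 0, {1,1,1,1} on rank 1 and
   {2,2,2,2} on rank 2, i.e. every row is assigned to the rank that already owns it.
*/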
30: /*
31: Partition the global rows into nearly equal contiguous pieces to rebalance the computation
32: */
33: static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part,IS *partitioning)
34: {
36: PetscInt m,M,nparts,*indices,r,d,*parts,i,start,end,loc;
39: MatGetSize(part->adj,&M,NULL);
40: MatGetLocalSize(part->adj,&m,NULL);
41: nparts = part->n;
42: PetscMalloc1(nparts,&parts);
43: d = M/nparts;
44: for (i=0; i<nparts; i++) parts[i] = d;
45: r = M%nparts;
46: for (i=0; i<r; i++) parts[i] += 1;
47: for (i=1; i<nparts; i++) parts[i] += parts[i-1];
48: PetscMalloc1(m,&indices);
49: MatGetOwnershipRange(part->adj,&start,&end);
50: for (i=start; i<end; i++) {
51: PetscFindInt(i,nparts,parts,&loc);
52: if (loc<0) loc = -(loc+1);
53: else loc = loc+1;
54: indices[i-start] = loc;
55: }
56: PetscFree(parts);
57: ISCreateGeneral(PetscObjectComm((PetscObject)part),m,indices,PETSC_OWN_POINTER,partitioning);
58: return(0);
59: }
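/*
   Worked example of the logic above: for M = 10 global rows and nparts = 3 the cumulative array is
   parts = {4,7,10}, so PetscFindInt() maps rows 0..3 to part 0, rows 4..6 to part 1 and rows 7..9
   to part 2; the first M%nparts parts each receive one extra row.
*/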
61: static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part,IS *partitioning)
62: {
64: PetscInt cell,n,N,p,rstart,rend,*color;
65: PetscMPIInt size;
68: MPI_Comm_size(PetscObjectComm((PetscObject)part),&size);
69: if (part->n != size) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Currently only supports one domain per processor");
70: p = (PetscInt)PetscSqrtReal((PetscReal)part->n);
71: if (p*p != part->n) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires \"perfect square\" number of domains");
73: MatGetSize(part->adj,&N,NULL);
74: n = (PetscInt)PetscSqrtReal((PetscReal)N);
75: if (n*n != N) SETERRQ(PetscObjectComm((PetscObject)part),PETSC_ERR_SUP,"Square partitioning requires square domain");
76: if (n%p != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Square partitioning requires p to divide n");
77: MatGetOwnershipRange(part->adj,&rstart,&rend);
78: PetscMalloc1(rend-rstart,&color);
79: /* for (int cell=rstart; cell<rend; cell++) { color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); } */
80: for (cell=rstart; cell<rend; cell++) {
81: color[cell-rstart] = ((cell%n) / (n/p)) + p * ((cell/n) / (n/p));
82: }
83: ISCreateGeneral(PetscObjectComm((PetscObject)part),rend-rstart,color,PETSC_OWN_POINTER,partitioning);
84: return(0);
85: }
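/*
   Worked example of the coloring above: a 4x4 grid (N = 16, n = 4) split over 4 processes (p = 2,
   n/p = 2) gives cell c, located in row c/n and column c%n, the color (c%n)/2 + 2*((c/n)/2):

      0 0 1 1
      0 0 1 1
      2 2 3 3
      2 2 3 3

   i.e. the grid is cut into p x p square blocks, one block per process.
*/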
87: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
88: {
90: part->ops->apply = MatPartitioningApply_Current;
91: part->ops->view = NULL;
92: part->ops->destroy = NULL;
93: return(0);
94: }
96: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
97: {
99: part->ops->apply = MatPartitioningApply_Average;
100: part->ops->view = NULL;
101: part->ops->destroy = NULL;
102: return(0);
103: }
105: PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
106: {
108: part->ops->apply = MatPartitioningApply_Square;
109: part->ops->view = NULL;
110: part->ops->destroy = NULL;
111: return(0);
112: }
114: /* gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
115: seps[ 0 : 2*p) : the start and end node of each subdomain
116: seps[2*p : 2*p+2*(p-1)) : the start and end node of each separator
117: levels[ 0 : p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
118: The arrays must be large enough
119: */
120: PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
121: {
122: PetscInt l2p,i,pTree,pStartTree;
126: if (!p) return(0);
127: l2p = (PetscInt)PetscLog2Real(p);
128: if (PetscPowInt(2,l2p) != p) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"%D is not a power of 2",p);
129: PetscArrayzero(seps,2*p-2);
130: PetscArrayzero(level,p-1);
131: seps[2*p-2] = sizes[2*p-2];
132: pTree = p;
133: pStartTree = 0;
134: while (pTree != 1) {
135: for (i = pStartTree; i < pStartTree + pTree; i++) {
136: seps[i] += sizes[i];
137: seps[pStartTree + pTree + (i-pStartTree)/2] += seps[i];
138: }
139: pStartTree += pTree;
140: pTree = pTree/2;
141: }
142: seps[2*p-2] -= sizes[2*p-2];
144: pStartTree = 2*p-2;
145: pTree = 1;
146: while (pStartTree > 0) {
147: for (i = pStartTree; i < pStartTree + pTree; i++) {
148: PetscInt k = 2*i - (pStartTree +2*pTree);
149: PetscInt n = seps[k+1];
151: seps[k+1] = seps[i] - sizes[k+1];
152: seps[k] = seps[k+1] + sizes[k+1] - n - sizes[k];
153: level[i-p] = -pTree - i + pStartTree;
154: }
155: pTree *= 2;
156: pStartTree -= pTree;
157: }
158: /* I know there should be a formula */
159: PetscSortIntWithArrayPair(p-1,seps+p,sizes+p,level);
160: for (i=2*p-2;i>=0;i--) { seps[2*i] = seps[i]; seps[2*i+1] = seps[i] + PetscMax(sizes[i] - 1,0); }
161: return(0);
162: }
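/*
   Worked example of the output layout documented above, for p = 2 subdomains:

      seps[0],seps[1]  - first and last node of subdomain 0
      seps[2],seps[3]  - first and last node of subdomain 1
      seps[4],seps[5]  - first and last node of the single separator
      level[0]         - tree level of that separator (-1, the root)
*/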
164: /* ===========================================================================================*/
166: PetscFunctionList MatPartitioningList = NULL;
167: PetscBool MatPartitioningRegisterAllCalled = PETSC_FALSE;
169: /*@C
170: MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.
172: Not Collective
174: Input Parameters:
175: + sname - name of the partitioning type (for example, MATPARTITIONINGCURRENT or "parmetis")
176: - function - function pointer that creates the partitioning type
178: Level: developer
180: Sample usage:
181: .vb
182: MatPartitioningRegister("my_part",MyPartCreate);
183: .ve
185: Then, your partitioner can be chosen with the procedural interface via
186: $ MatPartitioningSetType(part,"my_part")
187: or at runtime via the option
188: $ -mat_partitioning_type my_part
190: .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
191: @*/
192: PetscErrorCode MatPartitioningRegister(const char sname[],PetscErrorCode (*function)(MatPartitioning))
193: {
197: MatInitializePackage();
198: PetscFunctionListAdd(&MatPartitioningList,sname,function);
199: return(0);
200: }
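/*
   Illustrative sketch (hypothetical names, not part of the PETSc sources): a user-defined
   partitioner is registered by supplying a creation routine that fills in the ops table, just as
   MatPartitioningCreate_Current() does above.

      static PetscErrorCode MyPartitioningApply(MatPartitioning part,IS *partitioning)
      {
        PetscInt    m;
        PetscMPIInt rank;

        MPI_Comm_rank(PetscObjectComm((PetscObject)part),&rank);
        MatGetLocalSize(part->adj,&m,NULL);
        ISCreateStride(PetscObjectComm((PetscObject)part),m,rank,0,partitioning);
        return(0);
      }

      PETSC_EXTERN PetscErrorCode MyPartitioningCreate(MatPartitioning part)
      {
        part->ops->apply = MyPartitioningApply;
        return(0);
      }

      MatPartitioningRegister("my_part",MyPartitioningCreate);
      MatPartitioningSetType(part,"my_part");
*/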
202: /*@C
203: MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
204: from the partitioning context.
206: Not collective
208: Input Parameter:
209: . partitioning - the partitioning context
211: Output Parameter:
212: . type - partitioner type
214: Level: intermediate
218: @*/
219: PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning,MatPartitioningType *type)
220: {
224: *type = ((PetscObject)partitioning)->type_name;
225: return(0);
226: }
228: /*@C
229: MatPartitioningSetNParts - Set how many partitions need to be created;
230: by default this is one per processor. Certain partitioning schemes may
231: in fact only support that option.
233: Not collective
235: Input Parameters:
236: + partitioning - the partitioning context
237: - n - the number of partitions
239: Level: intermediate
243: .seealso: MatPartitioningCreate(), MatPartitioningApply()
244: @*/
245: PetscErrorCode MatPartitioningSetNParts(MatPartitioning part,PetscInt n)
246: {
248: part->n = n;
249: return(0);
250: }
252: /*@
253: MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.
255: Collective on Mat
257: Input Parameters:
258: . matp - the matrix partitioning object
260: Output Parameter:
261: . partitioning - the partitioning. For each local node, a nonnegative value gives the processor
262: number the node has been assigned to; a negative value x indicates that the node belongs to the separator of level -(x+1).
264: Level: beginner
266: The user can define additional partitionings; see MatPartitioningRegister().
268: .seealso: MatPartitioningRegister(), MatPartitioningCreate(),
269: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
270: ISPartitioningCount()
271: @*/
272: PetscErrorCode MatPartitioningApplyND(MatPartitioning matp,IS *partitioning)
273: {
279: if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
280: if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
281: if (!matp->ops->applynd) SETERRQ1(PetscObjectComm((PetscObject)matp),PETSC_ERR_SUP,"Nested dissection not provided by MatPartitioningType %s",((PetscObject)matp)->type_name);
282: PetscLogEventBegin(MAT_PartitioningND,matp,0,0,0);
283: (*matp->ops->applynd)(matp,partitioning);
284: PetscLogEventEnd(MAT_PartitioningND,matp,0,0,0);
286: MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
287: ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
288: return(0);
289: }
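/*
   Illustrative sketch: decoding the output of MatPartitioningApplyND(); as documented above, a
   nonnegative entry is the processor a node is assigned to, while a negative entry x places the
   node on the separator of level -(x+1). (nd is the IS returned by MatPartitioningApplyND().)

      const PetscInt *v;
      PetscInt       nlocal,i,proc,seplevel;

      ISGetLocalSize(nd,&nlocal);
      ISGetIndices(nd,&v);
      for (i=0; i<nlocal; i++) {
        if (v[i] >= 0) proc     = v[i];
        else           seplevel = -(v[i] + 1);
      }
      ISRestoreIndices(nd,&v);
*/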
291: /*@
292: MatPartitioningApply - Gets a partitioning for a matrix.
294: Collective on Mat
296: Input Parameters:
297: . matp - the matrix partitioning object
299: Output Parameter:
300: . partitioning - the partitioning. For each local node this gives the processor
301: number to which the node is assigned.
303: Options Database Keys:
304: To specify the partitioning through the options database, use one of
305: the following
306: $ -mat_partitioning_type parmetis, -mat_partitioning_type current
307: To see the partitioning result
308: $ -mat_partitioning_view
310: Level: beginner
312: The user can define additional partitionings; see MatPartitioningRegister().
314: .seealso: MatPartitioningRegister(), MatPartitioningCreate(),
315: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
316: ISPartitioningCount()
317: @*/
318: PetscErrorCode MatPartitioningApply(MatPartitioning matp,IS *partitioning)
319: {
321: PetscBool viewbalance,improve;
326: if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
327: if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
328: if (!matp->ops->apply) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Must set type with MatPartitioningSetFromOptions() or MatPartitioningSetType()");
329: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
330: (*matp->ops->apply)(matp,partitioning);
331: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
333: MatPartitioningViewFromOptions(matp,NULL,"-mat_partitioning_view");
334: ISViewFromOptions(*partitioning,NULL,"-mat_partitioning_view");
336: PetscObjectOptionsBegin((PetscObject)matp);
337: viewbalance = PETSC_FALSE;
338: PetscOptionsBool("-mat_partitioning_view_imbalance","Display imbalance information of a partition",NULL,PETSC_FALSE,&viewbalance,NULL);
339: improve = PETSC_FALSE;
340: PetscOptionsBool("-mat_partitioning_improve","Improve the quality of a partition",NULL,PETSC_FALSE,&improve,NULL);
341: PetscOptionsEnd();
343: if (improve) {
344: MatPartitioningImprove(matp,partitioning);
345: }
347: if (viewbalance) {
348: MatPartitioningViewImbalance(matp,*partitioning);
349: }
350: return(0);
351: }
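/*
   Illustrative sketch: a typical calling sequence for partitioning the rows of a matrix A whose
   nonzero structure defines the adjacency graph (error checking omitted; the call to
   ISPartitioningToNumbering() is optional post-processing).

      MatPartitioning part;
      IS              is,isnum;

      MatPartitioningCreate(PetscObjectComm((PetscObject)A),&part);
      MatPartitioningSetAdjacency(part,A);
      MatPartitioningSetFromOptions(part);
      MatPartitioningApply(part,&is);
      ISPartitioningToNumbering(is,&isnum);
      ISDestroy(&isnum);
      ISDestroy(&is);
      MatPartitioningDestroy(&part);
*/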
353: /*@
354: MatPartitioningImprove - Improves the quality of a given partition.
356: Collective on Mat
358: Input Parameters:
359: + matp - the matrix partitioning object
360: - partitioning - the partitioning. For each local node this gives the processor
361: number to which the node is assigned.
363: Output Parameter:
364: . partitioning - the improved partitioning. For each local node this gives the processor
365: number to which the node is assigned.
367: Options Database Keys:
368: To improve the quality of the partition
369: $ -mat_partitioning_improve
371: Level: beginner
373: .seealso: MatPartitioningApply(), MatPartitioningCreate(),
374: MatPartitioningDestroy(), MatPartitioningSetAdjacency(), ISPartitioningToNumbering(),
375: ISPartitioningCount()
376: @*/
377: PetscErrorCode MatPartitioningImprove(MatPartitioning matp,IS *partitioning)
378: {
384: if (!matp->adj->assembled) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
385: if (matp->adj->factortype) SETERRQ(PetscObjectComm((PetscObject)matp),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
386: PetscLogEventBegin(MAT_Partitioning,matp,0,0,0);
387: if (matp->ops->improve) {
388: (*matp->ops->improve)(matp,partitioning);
389: }
390: PetscLogEventEnd(MAT_Partitioning,matp,0,0,0);
391: return(0);
392: }
394: /*@
395: MatPartitioningViewImbalance - Display partitioning imbalance information.
397: Collective on MatPartitioning
399: Input Parameters:
400: + matp - the matrix partitioning object
401: - partitioning - the partitioning. For each local node this gives the processor
402: number to which the node is assigned.
404: Options Database Keys:
405: To see the partitioning imbalance information
406: $ -mat_partitioning_view_imbalance
408: Level: beginner
410: .seealso: MatPartitioningApply(), MatPartitioningView()
411: @*/
412: PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
413: {
414: PetscErrorCode ierr;
415: PetscInt nparts,*subdomainsizes,*subdomainsizes_tmp,nlocal,i,maxsub,minsub,avgsub;
416: const PetscInt *indices;
417: PetscViewer viewer;
422: nparts = matp->n;
423: PetscCalloc2(nparts,&subdomainsizes,nparts,&subdomainsizes_tmp);
424: ISGetLocalSize(partitioning,&nlocal);
425: ISGetIndices(partitioning,&indices);
426: for (i=0;i<nlocal;i++) {
427: subdomainsizes_tmp[indices[i]] += matp->vertex_weights? matp->vertex_weights[i]:1;
428: }
429: MPI_Allreduce(subdomainsizes_tmp,subdomainsizes,nparts,MPIU_INT,MPI_SUM, PetscObjectComm((PetscObject)matp));
430: ISRestoreIndices(partitioning,&indices);
431: minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub=0;
432: for (i=0; i<nparts; i++) {
433: minsub = PetscMin(minsub,subdomainsizes[i]);
434: maxsub = PetscMax(maxsub,subdomainsizes[i]);
435: avgsub += subdomainsizes[i];
436: }
437: avgsub /=nparts;
438: PetscFree2(subdomainsizes,subdomainsizes_tmp);
439: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp),&viewer);
440: MatPartitioningView(matp,viewer);
441: PetscViewerASCIIPrintf(viewer,"Partitioning Imbalance Info: Max %D, Min %D, Avg %D, R %g\n",maxsub, minsub, avgsub, (double)(maxsub/(PetscReal)minsub));
442: return(0);
443: }
445: /*@
446: MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the problem to be
447: partitioned.
449: Collective on MatPartitioning
451: Input Parameters:
452: + part - the partitioning context
453: - adj - the adjacency matrix
455: Level: beginner
457: .seealso: MatPartitioningCreate()
458: @*/
459: PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part,Mat adj)
460: {
464: part->adj = adj;
465: return(0);
466: }
468: /*@
469: MatPartitioningDestroy - Destroys the partitioning context.
471: Collective on Partitioning
473: Input Parameters:
474: . part - the partitioning context
476: Level: beginner
478: .seealso: MatPartitioningCreate()
479: @*/
480: PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
481: {
485: if (!*part) return(0);
487: if (--((PetscObject)(*part))->refct > 0) {*part = NULL; return(0);}
489: if ((*part)->ops->destroy) {
490: (*(*part)->ops->destroy)((*part));
491: }
492: PetscFree((*part)->vertex_weights);
493: PetscFree((*part)->part_weights);
494: PetscHeaderDestroy(part);
495: return(0);
496: }
498: /*@C
499: MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.
501: Logically Collective on Partitioning
503: Input Parameters:
504: + part - the partitioning context
505: - weights - the weights; on each process this array must have the same length as the number of local rows
507: Level: beginner
509: Notes:
510: The array weights is freed by PETSc so the user should not free the array. In C/C++
511: the array must be obtained with a call to PetscMalloc(), not malloc().
513: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetPartitionWeights()
514: @*/
515: PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part,const PetscInt weights[])
516: {
521: PetscFree(part->vertex_weights);
522: part->vertex_weights = (PetscInt*)weights;
523: return(0);
524: }
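/*
   Illustrative sketch: as noted above the weights array must be allocated with PetscMalloc() since
   ownership passes to the partitioning object (adj and part are assumed to have been created
   earlier; here every row gets weight 1).

      PetscInt *wgt,nlocal,i;

      MatGetLocalSize(adj,&nlocal,NULL);
      PetscMalloc1(nlocal,&wgt);
      for (i=0; i<nlocal; i++) wgt[i] = 1;
      MatPartitioningSetVertexWeights(part,wgt);
*/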
526: /*@C
527: MatPartitioningSetPartitionWeights - Sets the weights for each partition.
529: Logically Collective on Partitioning
531: Input Parameters:
532: + part - the partitioning context
533: - weights - An array of size nparts that is used to specify the fraction of
534: vertex weight that should be distributed to each sub-domain for
535: the balance constraint. If all of the sub-domains are to be of
536: the same size, then each of the nparts elements should be set
537: to a value of 1/nparts. Note that the sum of all of the weights
538: should be one.
540: Level: beginner
542: Notes:
543: The array weights is freed by PETSc so the user should not free the array. In C/C++
544: the array must be obtained with a call to PetscMalloc(), not malloc().
546: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights()
547: @*/
548: PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part,const PetscReal weights[])
549: {
554: PetscFree(part->part_weights);
555: part->part_weights = (PetscReal*)weights;
556: return(0);
557: }
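/*
   Illustrative sketch: equal-sized parts are requested by giving every part the fraction 1/nparts,
   so the weights sum to one as required above (part is assumed to exist and nparts is the number of
   parts, set with MatPartitioningSetNParts() or defaulting to the communicator size).

      PetscReal *pw;
      PetscInt   i;

      PetscMalloc1(nparts,&pw);
      for (i=0; i<nparts; i++) pw[i] = 1.0/(PetscReal)nparts;
      MatPartitioningSetPartitionWeights(part,pw);
*/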
559: /*@
560: MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.
562: Logically Collective on Partitioning
564: Input Parameters:
565: + part - the partitioning context
566: - use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights will be used,
567: that is, use_edge_weights is set to FALSE. If use_edge_weights is set to TRUE, the user must ensure that legal
568: edge weights are stored in the ADJ matrix.
569: Level: beginner
571: Options Database Keys:
572: . -mat_partitioning_use_edge_weights - (true or false)
574: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights()
575: @*/
576: PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part,PetscBool use_edge_weights)
577: {
580: part->use_edge_weights = use_edge_weights;
581: return(0);
582: }
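/*
   Illustrative sketch (assumption: ia, ja and weights describe the local adjacency structure and
   were obtained with PetscMalloc()): for the flag to have an effect the adjacency matrix must
   actually carry edge weights, e.g. through the values argument of MatCreateMPIAdj().

      Mat adj;

      MatCreateMPIAdj(comm,nlocal,N,ia,ja,weights,&adj);
      MatPartitioningSetAdjacency(part,adj);
      MatPartitioningSetUseEdgeWeights(part,PETSC_TRUE);
*/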
584: /*@
585: MatPartitioningGetUseEdgeWeights - Get the flag that indicates whether or not edge weights are used.
587: Logically Collective on Partitioning
589: Input Parameters:
590: . part - the partitioning context
592: Output Parameters:
593: . use_edge_weights - the flag indicating whether or not edge weights are used.
595: Level: beginner
597: .seealso: MatPartitioningCreate(), MatPartitioningSetType(), MatPartitioningSetVertexWeights(), MatPartitioningSetPartitionWeights(),
598: MatPartitioningSetUseEdgeWeights
599: @*/
600: PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part,PetscBool *use_edge_weights)
601: {
605: *use_edge_weights = part->use_edge_weights;
606: return(0);
607: }
609: /*@
610: MatPartitioningCreate - Creates a partitioning context.
612: Collective
614: Input Parameter:
615: . comm - MPI communicator
617: Output Parameter:
618: . newp - location to put the context
620: Level: beginner
622: .seealso: MatPartitioningSetType(), MatPartitioningApply(), MatPartitioningDestroy(),
623: MatPartitioningSetAdjacency()
625: @*/
626: PetscErrorCode MatPartitioningCreate(MPI_Comm comm,MatPartitioning *newp)
627: {
628: MatPartitioning part;
629: PetscErrorCode ierr;
630: PetscMPIInt size;
633: *newp = NULL;
635: MatInitializePackage();
636: PetscHeaderCreate(part,MAT_PARTITIONING_CLASSID,"MatPartitioning","Matrix/graph partitioning","MatOrderings",comm,MatPartitioningDestroy,MatPartitioningView);
637: part->vertex_weights = NULL;
638: part->part_weights = NULL;
639: part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */
641: MPI_Comm_size(comm,&size);
642: part->n = (PetscInt)size;
644: *newp = part;
645: return(0);
646: }
648: /*@C
649: MatPartitioningViewFromOptions - View a partitioning object based on values in the options database
651: Collective on MatPartitioning
653: Input Parameters:
654: + A - the partitioning context
655: . obj - optional object that provides the prefix for the option name
656: - name - command line option
658: Level: intermediate
659: .seealso: MatPartitioning, MatPartitioningView, PetscObjectViewFromOptions(), MatPartitioningCreate()
660: @*/
661: PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A,PetscObject obj,const char name[])
662: {
667: PetscObjectViewFromOptions((PetscObject)A,obj,name);
668: return(0);
669: }
671: /*@C
672: MatPartitioningView - Prints the partitioning data structure.
674: Collective on MatPartitioning
676: Input Parameters:
677: + part - the partitioning context
678: - viewer - optional visualization context
680: Level: intermediate
682: Note:
683: The available visualization contexts include
684: + PETSC_VIEWER_STDOUT_SELF - standard output (default)
685: - PETSC_VIEWER_STDOUT_WORLD - synchronized standard
686: output where only the first processor opens
687: the file. All other processors send their
688: data to the first processor to print.
690: The user can open alternative visualization contexts with
691: . PetscViewerASCIIOpen() - output to a specified file
693: .seealso: PetscViewerASCIIOpen()
694: @*/
695: PetscErrorCode MatPartitioningView(MatPartitioning part,PetscViewer viewer)
696: {
698: PetscBool iascii;
702: if (!viewer) {
703: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part),&viewer);
704: }
708: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
709: if (iascii) {
710: PetscObjectPrintClassNamePrefixType((PetscObject)part,viewer);
711: if (part->vertex_weights) {
712: PetscViewerASCIIPrintf(viewer," Using vertex weights\n");
713: }
714: }
715: if (part->ops->view) {
716: PetscViewerASCIIPushTab(viewer);
717: (*part->ops->view)(part,viewer);
718: PetscViewerASCIIPopTab(viewer);
719: }
720: return(0);
721: }
723: /*@C
724: MatPartitioningSetType - Sets the type of partitioner to use
726: Collective on MatPartitioning
728: Input Parameters:
729: + part - the partitioning context.
730: - type - a known method
732: Options Database Command:
733: $ -mat_partitioning_type <type>
734: $ Use -help for a list of available methods
735: $ (for instance, parmetis)
737: Level: intermediate
739: .seealso: MatPartitioningCreate(), MatPartitioningApply(), MatPartitioningType
741: @*/
742: PetscErrorCode MatPartitioningSetType(MatPartitioning part,MatPartitioningType type)
743: {
744: PetscErrorCode ierr,(*r)(MatPartitioning);
745: PetscBool match;
751: PetscObjectTypeCompare((PetscObject)part,type,&match);
752: if (match) return(0);
754: if (part->ops->destroy) {
755: (*part->ops->destroy)(part);
756: part->ops->destroy = NULL;
757: }
758: part->setupcalled = 0;
759: part->data = NULL;
760: PetscMemzero(part->ops,sizeof(struct _MatPartitioningOps));
762: PetscFunctionListFind(MatPartitioningList,type,&r);
763: if (!r) SETERRQ1(PetscObjectComm((PetscObject)part),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown partitioning type %s",type);
765: (*r)(part);
767: PetscFree(((PetscObject)part)->type_name);
768: PetscStrallocpy(type,&((PetscObject)part)->type_name);
769: return(0);
770: }
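/*
   Illustrative sketch: the type can be set programmatically, for instance to ParMetis (assuming
   PETSc was configured with ParMetis), or left to MatPartitioningSetFromOptions() below and chosen
   at runtime, e.g. -mat_partitioning_type parmetis -mat_partitioning_nparts 8 -mat_partitioning_view.

      MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);
*/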
772: /*@
773: MatPartitioningSetFromOptions - Sets various partitioning options from the
774: options database.
776: Collective on MatPartitioning
778: Input Parameter:
779: . part - the partitioning context.
781: Options Database Command:
782: $ -mat_partitioning_type <type>
783: $ Use -help for a list of available methods
784: $ (for instance, parmetis)
785: $ -mat_partitioning_nparts - number of subgraphs
787: Notes:
788: If the partitioner has not been set by the user it uses one of the installed partitioners, such as ParMetis. If there are
789: no installed partitioners it defaults to MATPARTITIONINGCURRENT, which means no repartitioning.
791: Level: beginner
793: @*/
794: PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
795: {
797: PetscBool flag;
798: char type[256];
799: const char *def;
802: PetscObjectOptionsBegin((PetscObject)part);
803: if (!((PetscObject)part)->type_name) {
804: #if defined(PETSC_HAVE_PARMETIS)
805: def = MATPARTITIONINGPARMETIS;
806: #elif defined(PETSC_HAVE_CHACO)
807: def = MATPARTITIONINGCHACO;
808: #elif defined(PETSC_HAVE_PARTY)
809: def = MATPARTITIONINGPARTY;
810: #elif defined(PETSC_HAVE_PTSCOTCH)
811: def = MATPARTITIONINGPTSCOTCH;
812: #else
813: def = MATPARTITIONINGCURRENT;
814: #endif
815: } else {
816: def = ((PetscObject)part)->type_name;
817: }
818: PetscOptionsFList("-mat_partitioning_type","Type of partitioner","MatPartitioningSetType",MatPartitioningList,def,type,256,&flag);
819: if (flag) {
820: MatPartitioningSetType(part,type);
821: }
823: PetscOptionsInt("-mat_partitioning_nparts","number of fine parts",NULL,part->n,& part->n,&flag);
825: PetscOptionsBool("-mat_partitioning_use_edge_weights","whether or not to use edge weights",NULL,part->use_edge_weights,&part->use_edge_weights,&flag);
827: /*
828: Set the type if it was never set.
829: */
830: if (!((PetscObject)part)->type_name) {
831: MatPartitioningSetType(part,def);
832: }
834: if (part->ops->setfromoptions) {
835: (*part->ops->setfromoptions)(PetscOptionsObject,part);
836: }
837: PetscOptionsEnd();
838: return(0);
839: }