/* pipes.c */
1: static char help[] = "This example demonstrates DMNetwork. It is used for testing parallel generation of dmnetwork, then redistribute. \n\\n";
2: /*
3: Example: mpiexec -n <np> ./pipes -ts_max_steps 10
4: */
6: #include "wash.h"
8: /*
9: WashNetworkDistribute - proc[0] distributes sequential wash object
10: Input Parameters:
11: . comm - MPI communicator
12: . wash - wash context with all network data in proc[0]
14: Output Parameter:
15: . wash - wash context with nedge, nvertex and edgelist distributed
17: Note: The routine is used for testing parallel generation of dmnetwork, then redistribute.
18: */
19: PetscErrorCode WashNetworkDistribute(MPI_Comm comm,Wash wash)
20: {
22: PetscMPIInt rank,size,tag=0;
23: PetscInt i,e,v,numEdges,numVertices,nedges,*eowners=NULL,estart,eend,*vtype=NULL,nvertices;
24: PetscInt *edgelist = wash->edgelist,*nvtx=NULL,*vtxDone=NULL;
27: MPI_Comm_size(comm,&size);
28: if (size == 1) return(0);
30: MPI_Comm_rank(comm,&rank);
31: numEdges = wash->nedge;
32: numVertices = wash->nvertex;
34: /* (1) all processes get global and local number of edges */
35: MPI_Bcast(&numEdges,1,MPIU_INT,0,comm);
36: nedges = numEdges/size; /* local nedges */
37: if (rank == 0) {
38: nedges += numEdges - size*(numEdges/size);
39: }
40: wash->Nedge = numEdges;
41: wash->nedge = nedges;
42: /* PetscPrintf(PETSC_COMM_SELF,"[%d] nedges %d, numEdges %d\n",rank,nedges,numEdges); */
44: PetscCalloc3(size+1,&eowners,size,&nvtx,numVertices,&vtxDone);
45: MPI_Allgather(&nedges,1,MPIU_INT,eowners+1,1,MPIU_INT,PETSC_COMM_WORLD);
46: eowners[0] = 0;
47: for (i=2; i<=size; i++) {
48: eowners[i] += eowners[i-1];
49: }
51: estart = eowners[rank];
52: eend = eowners[rank+1];
53: /* PetscPrintf(PETSC_COMM_SELF,"[%d] own lists row %d - %d\n",rank,estart,eend); */
55: /* (2) distribute row block edgelist to all processors */
56: if (rank == 0) {
57: vtype = wash->vtype;
58: for (i=1; i<size; i++) {
59: /* proc[0] sends edgelist to proc[i] */
60: MPI_Send(edgelist+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm);
62: /* proc[0] sends vtype to proc[i] */
63: MPI_Send(vtype+2*eowners[i],2*(eowners[i+1]-eowners[i]),MPIU_INT,i,tag,comm);
64: }
65: } else {
66: MPI_Status status;
67: PetscMalloc1(2*(eend-estart),&vtype);
68: PetscMalloc1(2*(eend-estart),&edgelist);
70: MPI_Recv(edgelist,2*(eend-estart),MPIU_INT,0,tag,comm,&status);
71: MPI_Recv(vtype,2*(eend-estart),MPIU_INT,0,tag,comm,&status);
72: }
74: wash->edgelist = edgelist;
76: /* (3) all processes get global and local number of vertices, without ghost vertices */
77: if (rank == 0) {
78: for (i=0; i<size; i++) {
79: for (e=eowners[i]; e<eowners[i+1]; e++) {
80: v = edgelist[2*e];
81: if (!vtxDone[v]) {
82: nvtx[i]++; vtxDone[v] = 1;
83: }
84: v = edgelist[2*e+1];
85: if (!vtxDone[v]) {
86: nvtx[i]++; vtxDone[v] = 1;
87: }
88: }
89: }
90: }
91: MPI_Bcast(&numVertices,1,MPIU_INT,0,PETSC_COMM_WORLD);
92: MPI_Scatter(nvtx,1,MPIU_INT,&nvertices,1,MPIU_INT,0,PETSC_COMM_WORLD);
93: PetscFree3(eowners,nvtx,vtxDone);
95: wash->Nvertex = numVertices;
96: wash->nvertex = nvertices;
97: wash->vtype = vtype;
98: return(0);
99: }
101: PetscErrorCode WASHIFunction(TS ts,PetscReal t,Vec X,Vec Xdot,Vec F,void* ctx)
102: {
104: Wash wash=(Wash)ctx;
105: DM networkdm;
106: Vec localX,localXdot,localF, localXold;
107: const PetscInt *cone;
108: PetscInt vfrom,vto,offsetfrom,offsetto,varoffset;
109: PetscInt v,vStart,vEnd,e,eStart,eEnd;
110: PetscInt nend,type;
111: PetscBool ghost;
112: PetscScalar *farr,*juncf, *pipef;
113: PetscReal dt;
114: Pipe pipe;
115: PipeField *pipex,*pipexdot,*juncx;
116: Junction junction;
117: DMDALocalInfo info;
118: const PetscScalar *xarr,*xdotarr, *xoldarr;
121: localX = wash->localX;
122: localXdot = wash->localXdot;
124: TSGetSolution(ts,&localXold);
125: TSGetDM(ts,&networkdm);
126: TSGetTimeStep(ts,&dt);
127: DMGetLocalVector(networkdm,&localF);
129: /* Set F and localF as zero */
130: VecSet(F,0.0);
131: VecSet(localF,0.0);
133: /* Update ghost values of locaX and locaXdot */
134: DMGlobalToLocalBegin(networkdm,X,INSERT_VALUES,localX);
135: DMGlobalToLocalEnd(networkdm,X,INSERT_VALUES,localX);
137: DMGlobalToLocalBegin(networkdm,Xdot,INSERT_VALUES,localXdot);
138: DMGlobalToLocalEnd(networkdm,Xdot,INSERT_VALUES,localXdot);
140: VecGetArrayRead(localX,&xarr);
141: VecGetArrayRead(localXdot,&xdotarr);
142: VecGetArrayRead(localXold,&xoldarr);
143: VecGetArray(localF,&farr);
145: /* junction->type == JUNCTION:
146: juncf[0] = -qJ + sum(qin); juncf[1] = qJ - sum(qout)
147: junction->type == RESERVOIR (upper stream):
148: juncf[0] = -hJ + H0; juncf[1] = qJ - sum(qout)
149: junction->type == VALVE (down stream):
150: juncf[0] = -qJ + sum(qin); juncf[1] = qJ
151: */
152: /* Vertex/junction initialization */
153: DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);
154: for (v=vStart; v<vEnd; v++) {
155: DMNetworkIsGhostVertex(networkdm,v,&ghost);
156: if (ghost) continue;
158: DMNetworkGetComponent(networkdm,v,0,&type,(void**)&junction,NULL);
159: DMNetworkGetLocalVecOffset(networkdm,v,ALL_COMPONENTS,&varoffset);
160: juncx = (PipeField*)(xarr + varoffset);
161: juncf = (PetscScalar*)(farr + varoffset);
163: juncf[0] = -juncx[0].q;
164: juncf[1] = juncx[0].q;
166: if (junction->type == RESERVOIR) { /* upstream reservoir */
167: juncf[0] = juncx[0].h - wash->H0;
168: }
169: }
171: /* Edge/pipe */
172: DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
173: for (e=eStart; e<eEnd; e++) {
174: DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL);
175: DMNetworkGetLocalVecOffset(networkdm,e,ALL_COMPONENTS,&varoffset);
176: pipex = (PipeField*)(xarr + varoffset);
177: pipexdot = (PipeField*)(xdotarr + varoffset);
178: pipef = (PetscScalar*)(farr + varoffset);
180: /* Get some data into the pipe structure: note, some of these operations
181: * might be redundant. Will it consume too much time? */
182: pipe->dt = dt;
183: pipe->xold = (PipeField*)(xoldarr + varoffset);
185: /* Evaluate F over this edge/pipe: pipef[1], ...,pipef[2*nend] */
186: DMDAGetLocalInfo(pipe->da,&info);
187: PipeIFunctionLocal_Lax(&info,t,pipex,pipexdot,pipef,pipe);
189: /* Get boundary values from connected vertices */
190: DMNetworkGetConnectedVertices(networkdm,e,&cone);
191: vfrom = cone[0]; /* local ordering */
192: vto = cone[1];
193: DMNetworkGetLocalVecOffset(networkdm,vfrom,ALL_COMPONENTS,&offsetfrom);
194: DMNetworkGetLocalVecOffset(networkdm,vto,ALL_COMPONENTS,&offsetto);
196: /* Evaluate upstream boundary */
197: DMNetworkGetComponent(networkdm,vfrom,0,&type,(void**)&junction,NULL);
198: if (junction->type != JUNCTION && junction->type != RESERVOIR) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
199: juncx = (PipeField*)(xarr + offsetfrom);
200: juncf = (PetscScalar*)(farr + offsetfrom);
202: pipef[0] = pipex[0].h - juncx[0].h;
203: juncf[1] -= pipex[0].q;
205: /* Evaluate downstream boundary */
206: DMNetworkGetComponent(networkdm,vto,0,&type,(void**)&junction,NULL);
207: if (junction->type != JUNCTION && junction->type != VALVE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"junction type is not supported");
208: juncx = (PipeField*)(xarr + offsetto);
209: juncf = (PetscScalar*)(farr + offsetto);
210: nend = pipe->nnodes - 1;
212: pipef[2*nend + 1] = pipex[nend].h - juncx[0].h;
213: juncf[0] += pipex[nend].q;
214: }
216: VecRestoreArrayRead(localX,&xarr);
217: VecRestoreArrayRead(localXdot,&xdotarr);
218: VecRestoreArray(localF,&farr);
220: DMLocalToGlobalBegin(networkdm,localF,ADD_VALUES,F);
221: DMLocalToGlobalEnd(networkdm,localF,ADD_VALUES,F);
222: DMRestoreLocalVector(networkdm,&localF);
223: /*
224: PetscPrintf(PETSC_COMM_WORLD("F:\n");
225: VecView(F,PETSC_VIEWER_STDOUT_WORLD);
226: */
227: return(0);
228: }
/*
  WASHSetInitialSolution - fill X with the initial guess: steady state in
  each pipe (via PipeComputeSteadyState), head H0 at RESERVOIR vertices
  and flow QL at VALVE vertices.

  Input Parameters:
+ networkdm - the DMNetwork
. X         - global solution vector to fill; zeroed first and assembled
              with ADD_VALUES so each entry is contributed by one rank
- wash      - wash context providing Q0, H0, QL
*/
PetscErrorCode WASHSetInitialSolution(DM networkdm,Vec X,Wash wash)
{
  PetscInt          k,nx,vkey,vfrom,vto,offsetfrom,offsetto;
  PetscInt          type,varoffset;
  PetscInt          e,eStart,eEnd;
  Vec               localX;
  PetscScalar       *xarr;
  Pipe              pipe;
  Junction          junction;
  const PetscInt    *cone;
  const PetscScalar *xarray;

  VecSet(X,0.0);
  DMGetLocalVector(networkdm,&localX);
  VecGetArray(localX,&xarr);

  /* Edge: set steady-state values over each locally owned pipe */
  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  for (e=eStart; e<eEnd; e++) {
    DMNetworkGetLocalVecOffset(networkdm,e,ALL_COMPONENTS,&varoffset);
    DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL);

    /* set initial values for this pipe */
    PipeComputeSteadyState(pipe,wash->Q0,wash->H0);
    VecGetSize(pipe->x,&nx);

    VecGetArrayRead(pipe->x,&xarray);
    /* copy pipe->x into the pipe's slice of localX */
    for (k=0; k<nx; k++) {
      (xarr+varoffset)[k] = xarray[k];
    }

    /* set boundary values into vfrom and vto */
    DMNetworkGetConnectedVertices(networkdm,e,&cone);
    vfrom = cone[0]; /* local ordering */
    vto   = cone[1];
    DMNetworkGetLocalVecOffset(networkdm,vfrom,ALL_COMPONENTS,&offsetfrom);
    DMNetworkGetLocalVecOffset(networkdm,vto,ALL_COMPONENTS,&offsetto);

    /* if vfrom is a head vertex: set its H (second variable) to H0 */
    DMNetworkGetComponent(networkdm,vfrom,0,&vkey,(void**)&junction,NULL);
    if (junction->type == RESERVOIR) {
      (xarr+offsetfrom)[1] = wash->H0; /* 1st H */
    }

    /* if vto is an end vertex: set its Q (first variable) to QL */
    DMNetworkGetComponent(networkdm,vto,0,&vkey,(void**)&junction,NULL);
    if (junction->type == VALVE) {
      (xarr+offsetto)[0] = wash->QL; /* last Q */
    }
    VecRestoreArrayRead(pipe->x,&xarray);
  }

  VecRestoreArray(localX,&xarr);
  DMLocalToGlobalBegin(networkdm,localX,ADD_VALUES,X);
  DMLocalToGlobalEnd(networkdm,localX,ADD_VALUES,X);
  DMRestoreLocalVector(networkdm,&localX);

#if 0
  PetscInt N;
  VecGetSize(X,&N);
  PetscPrintf(PETSC_COMM_WORLD,"initial solution %d:\n",N);
  VecView(X,PETSC_VIEWER_STDOUT_WORLD);
#endif
  return(0);
}
299: PetscErrorCode TSDMNetworkMonitor(TS ts, PetscInt step, PetscReal t, Vec x, void *context)
300: {
301: PetscErrorCode ierr;
302: DMNetworkMonitor monitor;
305: monitor = (DMNetworkMonitor)context;
306: DMNetworkMonitorView(monitor,x);
307: return(0);
308: }
/*
  PipesView - view selected pipe variables of the solution X: first the
  q-variables, then the h-variables, then all pipe variables, each gathered
  into a vector Xto via an IS built with DMNetworkCreateIS.

  Input Parameters:
+ networkdm - the DMNetwork
. KeyPipe   - component key registered for the pipe struct
- X         - global solution vector
*/
PetscErrorCode PipesView(DM networkdm,PetscInt KeyPipe,Vec X)
{
  PetscInt   i,numkeys=1,*blocksize,*numselectedvariable,**selectedvariables,n;
  IS         isfrom_q,isfrom_h,isfrom;
  Vec        Xto;
  VecScatter ctx;
  MPI_Comm   comm;

  PetscObjectGetComm((PetscObject)networkdm,&comm);

  /* 1. Create isfrom_q for q-variable of pipes */
  PetscMalloc3(numkeys,&blocksize,numkeys,&numselectedvariable,numkeys,&selectedvariables);
  for (i=0; i<numkeys; i++) {
    blocksize[i]           = 2; /* each pipe node has (q,h) */
    numselectedvariable[i] = 1;
    PetscMalloc1(numselectedvariable[i],&selectedvariables[i]);
    selectedvariables[i][0] = 0; /* q-variable */
  }
  DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,numselectedvariable,selectedvariables,&isfrom_q);

  /* 2. Create Xto and isto */
  ISGetLocalSize(isfrom_q, &n);
  VecCreate(comm,&Xto);
  VecSetSizes(Xto,n,PETSC_DECIDE);
  VecSetFromOptions(Xto);
  VecSet(Xto,0.0);

  /* 3. Create scatter */
  VecScatterCreate(X,isfrom_q,Xto,NULL,&ctx);

  /* 4. Scatter to Xq */
  VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterDestroy(&ctx);
  ISDestroy(&isfrom_q);

  PetscPrintf(PETSC_COMM_WORLD,"Xq:\n");
  VecView(Xto,PETSC_VIEWER_STDOUT_WORLD);

  /* 5. Create isfrom_h for h-variable of pipes; Create scatter; Scatter to Xh
     (Xto is reused: the h-selection has the same local size as the q-selection) */
  for (i=0; i<numkeys; i++) {
    selectedvariables[i][0] = 1; /* h-variable */
  }
  DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,numselectedvariable,selectedvariables,&isfrom_h);

  VecScatterCreate(X,isfrom_h,Xto,NULL,&ctx);
  VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterDestroy(&ctx);
  ISDestroy(&isfrom_h);

  PetscPrintf(PETSC_COMM_WORLD,"Xh:\n");
  VecView(Xto,PETSC_VIEWER_STDOUT_WORLD);
  VecDestroy(&Xto);

  /* 6. Create isfrom for all pipe variables; Create scatter; Scatter to Xpipes.
     Both calling conventions are exercised: blocksize=-1 and all-NULL args
     select all variables of the component; the first IS is discarded on purpose */
  for (i=0; i<numkeys; i++) {
    blocksize[i] = -1; /* select all the variables of the i-th component */
  }
  DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,blocksize,NULL,NULL,&isfrom);
  ISDestroy(&isfrom);
  DMNetworkCreateIS(networkdm,numkeys,&KeyPipe,NULL,NULL,NULL,&isfrom);

  ISGetLocalSize(isfrom, &n);
  VecCreate(comm,&Xto);
  VecSetSizes(Xto,n,PETSC_DECIDE);
  VecSetFromOptions(Xto);
  VecSet(Xto,0.0);

  VecScatterCreate(X,isfrom,Xto,NULL,&ctx);
  VecScatterBegin(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(ctx,X,Xto,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterDestroy(&ctx);
  ISDestroy(&isfrom);

  PetscPrintf(PETSC_COMM_WORLD,"Xpipes:\n");
  VecView(Xto,PETSC_VIEWER_STDOUT_WORLD);

  /* 7. Free spaces */
  for (i=0; i<numkeys; i++) {
    PetscFree(selectedvariables[i]);
  }
  PetscFree3(blocksize,numselectedvariable,selectedvariables);
  VecDestroy(&Xto);
  return(0);
}
399: PetscErrorCode ISJunctionsView(DM networkdm,PetscInt KeyJunc)
400: {
402: PetscInt numkeys=1;
403: IS isfrom;
404: MPI_Comm comm;
405: PetscMPIInt rank;
408: PetscObjectGetComm((PetscObject)networkdm,&comm);
409: MPI_Comm_rank(comm,&rank);
411: /* Create a global isfrom for all junction variables */
412: DMNetworkCreateIS(networkdm,numkeys,&KeyJunc,NULL,NULL,NULL,&isfrom);
413: PetscPrintf(PETSC_COMM_WORLD,"ISJunctions:\n");
414: ISView(isfrom,PETSC_VIEWER_STDOUT_WORLD);
415: ISDestroy(&isfrom);
417: /* Create a local isfrom for all junction variables */
418: DMNetworkCreateLocalIS(networkdm,numkeys,&KeyJunc,NULL,NULL,NULL,&isfrom);
419: if (!rank) {
420: PetscPrintf(PETSC_COMM_SELF,"[%d] ISLocalJunctions:\n",rank);
421: ISView(isfrom,PETSC_VIEWER_STDOUT_SELF);
422: }
423: ISDestroy(&isfrom);
424: return(0);
425: }
427: PetscErrorCode WashNetworkCleanUp(Wash wash)
428: {
430: PetscMPIInt rank;
433: MPI_Comm_rank(wash->comm,&rank);
434: PetscFree(wash->edgelist);
435: PetscFree(wash->vtype);
436: if (rank == 0) {
437: PetscFree2(wash->junction,wash->pipe);
438: }
439: return(0);
440: }
/*
  WashNetworkCreate - create a wash context for one of three hard-wired
  pipe networks. proc[0] builds the entire sequential network (edgelist,
  junctions, pipes, vtype); it is then optionally distributed with
  WashNetworkDistribute (option -wash_distribute).

  Input Parameters:
+ comm      - MPI communicator
- pipesCase - which test network to build (0, 1 or 2)

  Output Parameter:
. wash_ptr - the newly allocated wash context (caller owns it)
*/
PetscErrorCode WashNetworkCreate(MPI_Comm comm,PetscInt pipesCase,Wash *wash_ptr)
{
  PetscInt  npipes;
  PetscMPIInt rank;
  Wash      wash=NULL;
  PetscInt  i,numVertices,numEdges,*vtype;
  PetscInt  *edgelist;
  Junction  junctions=NULL;
  Pipe      pipes=NULL;
  PetscBool washdist=PETSC_TRUE;

  MPI_Comm_rank(comm,&rank);

  PetscCalloc1(1,&wash);
  wash->comm = comm;
  *wash_ptr  = wash;
  /* Physical boundary data used by the IFunction and initial solution */
  wash->Q0 = 0.477432; /* RESERVOIR */
  wash->H0 = 150.0;
  wash->HL = 143.488; /* VALVE */
  wash->QL = 0.0;
  wash->nnodes_loc = 0;

  numVertices = 0;
  numEdges    = 0;
  edgelist    = NULL;

  /* proc[0] creates a sequential wash and edgelist */
  PetscPrintf(PETSC_COMM_WORLD,"Setup pipesCase %D\n",pipesCase);

  /* Set global number of pipes, edges, and junctions */
  /*-------------------------------------------------*/
  switch (pipesCase) {
  case 0:
    /* pipeCase 0: */
    /* =================================================
    (RESERVOIR) v0 --E0--> v1--E1--> v2 --E2-->v3 (VALVE)
    ====================================================  */
    npipes = 3;
    PetscOptionsGetInt(NULL,NULL, "-npipes", &npipes, NULL);
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    numVertices = 0;
    numEdges    = 0;
    edgelist    = NULL;
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCalloc1(2*numEdges,&edgelist);
      /* simple chain: edge i connects vertex i to vertex i+1 */
      for (i=0; i<numEdges; i++) {
        edgelist[2*i] = i; edgelist[2*i+1] = i+1;
      }

      /* Add network components */
      /*------------------------*/
      PetscCalloc2(numVertices,&junctions,numEdges,&pipes);

      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type             = RESERVOIR;
      junctions[numVertices-1].type = VALVE;
    }
    break;
  case 1:
    /* pipeCase 1: */
    /* ==========================
                v2 (VALVE)
                ^
                |
                E2
                |
    v0 --E0--> v3--E1--> v1
  (RESERVOIR)            (RESERVOIR)
    =============================  */
    npipes = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCalloc1(2*numEdges,&edgelist);
      edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
      edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
      edgelist[4] = 3; edgelist[5] = 2;  /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCalloc2(numVertices,&junctions,numEdges,&pipes);
      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = VALVE;
    }
    break;
  case 2:
    /* pipeCase 2: */
    /* ==========================
    (RESERVOIR)  v2--> E2
                       |
    v0 --E0--> v3--E1--> v1
  (RESERVOIR)            (VALVE)
    =============================  */

    /* Set application parameters -- to be used in function evalutions */
    npipes = 3;
    wash->nedge   = npipes;
    wash->nvertex = npipes + 1;

    /* Set local edges and vertices -- proc[0] sets entire network, then distributes */
    if (rank == 0) {
      numVertices = wash->nvertex;
      numEdges    = wash->nedge;

      PetscCalloc1(2*numEdges,&edgelist);
      edgelist[0] = 0; edgelist[1] = 3;  /* edge[0] */
      edgelist[2] = 3; edgelist[3] = 1;  /* edge[1] */
      edgelist[4] = 2; edgelist[5] = 3;  /* edge[2] */

      /* Add network components */
      /*------------------------*/
      PetscCalloc2(numVertices,&junctions,numEdges,&pipes);
      /* vertex */
      for (i=0; i<numVertices; i++) {
        junctions[i].id   = i;
        junctions[i].type = JUNCTION;
      }

      junctions[0].type = RESERVOIR;
      junctions[1].type = VALVE;
      junctions[2].type = RESERVOIR;
    }
    break;
  default:
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"not done yet");
  }

  /* set edge global id (numEdges is 0 on ranks != 0, so the loop is a no-op there) */
  for (i=0; i<numEdges; i++) pipes[i].id = i;

  if (rank == 0) { /* set vtype for proc[0]: per edge endpoint, the junction type */
    PetscInt v;
    PetscMalloc1(2*numEdges,&vtype);
    for (i=0; i<2*numEdges; i++) {
      v        = edgelist[i];
      vtype[i] = junctions[v].type;
    }
    wash->vtype = vtype;
  }

  *wash_ptr      = wash;
  wash->nedge    = numEdges;
  wash->nvertex  = numVertices;
  wash->edgelist = edgelist;
  wash->junction = junctions;
  wash->pipe     = pipes;

  /* Distribute edgelist to other processors */
  PetscOptionsGetBool(NULL,NULL,"-wash_distribute",&washdist,NULL);
  if (washdist) {
    /*
     PetscPrintf(PETSC_COMM_WORLD," Distribute sequential wash ...\n");
     */
    WashNetworkDistribute(comm,wash);
  }
  return(0);
}
625: /* ------------------------------------------------------- */
/*
  main - build a wash network (optionally pre-distributed), lay it out as a
  DMNetwork, distribute it, set up pipes and Jacobian blocks, and integrate
  the water-hammer DAE with backward Euler.
*/
int main(int argc,char ** argv)
{
  PetscErrorCode    ierr;
  Wash              wash;
  Junction          junctions,junction;
  Pipe              pipe,pipes;
  PetscInt          KeyPipe,KeyJunction,*edgelist = NULL,*vtype = NULL;
  PetscInt          i,e,v,eStart,eEnd,vStart,vEnd,key,vkey,type;
  PetscInt          steps=1,nedges,nnodes=6;
  const PetscInt    *cone;
  DM                networkdm;
  PetscMPIInt       size,rank;
  PetscReal         ftime;
  Vec               X;
  TS                ts;
  TSConvergedReason reason;
  PetscBool         viewpipes,viewjuncs,monipipes=PETSC_FALSE,userJac=PETSC_TRUE,viewdm=PETSC_FALSE,viewX=PETSC_FALSE;
  PetscInt          pipesCase=0;
  DMNetworkMonitor  monitor;
  MPI_Comm          comm;

  /* "pOption" is an options file shipped with the test */
  PetscInitialize(&argc,&argv,"pOption",help);if (ierr) return ierr;

  /* Read runtime options */
  PetscOptionsGetInt(NULL,NULL, "-case", &pipesCase, NULL);
  PetscOptionsGetBool(NULL,NULL,"-user_Jac",&userJac,NULL);
  PetscOptionsGetBool(NULL,NULL,"-pipe_monitor",&monipipes,NULL);
  PetscOptionsGetBool(NULL,NULL,"-viewdm",&viewdm,NULL);
  PetscOptionsGetBool(NULL,NULL,"-viewX",&viewX,NULL);
  PetscOptionsGetInt(NULL,NULL, "-npipenodes", &nnodes, NULL);

  /* Create networkdm */
  /*------------------*/
  DMNetworkCreate(PETSC_COMM_WORLD,&networkdm);
  PetscObjectGetComm((PetscObject)networkdm,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  /* graphical monitoring is only supported sequentially */
  if (size == 1 && monipipes) {
    DMNetworkMonitorCreate(networkdm,&monitor);
  }

  /* Register the components in the network */
  DMNetworkRegisterComponent(networkdm,"junctionstruct",sizeof(struct _p_Junction),&KeyJunction);
  DMNetworkRegisterComponent(networkdm,"pipestruct",sizeof(struct _p_Pipe),&KeyPipe);

  /* Create a distributed wash network (user-specific) */
  WashNetworkCreate(comm,pipesCase,&wash);
  nedges    = wash->nedge;
  edgelist  = wash->edgelist;
  vtype     = wash->vtype;
  junctions = wash->junction;
  pipes     = wash->pipe;

  /* Set up the network layout */
  DMNetworkSetNumSubNetworks(networkdm,PETSC_DECIDE,1);
  DMNetworkAddSubnetwork(networkdm,NULL,nedges,edgelist,NULL);

  DMNetworkLayoutSetUp(networkdm);

  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);
  /* PetscPrintf(PETSC_COMM_SELF,"[%d] eStart/End: %d - %d; vStart/End: %d - %d\n",rank,eStart,eEnd,vStart,vEnd); */

  if (rank) { /* junctions[] and pipes[] for proc[0] are allocated in WashNetworkCreate() */
    /* vEnd - vStart = nvertices + number of ghost vertices! */
    PetscCalloc2(vEnd - vStart,&junctions,nedges,&pipes);
  }

  /* Add Pipe component and number of variables to all local edges */
  for (e = eStart; e < eEnd; e++) {
    pipes[e-eStart].nnodes = nnodes;
    DMNetworkAddComponent(networkdm,e,KeyPipe,&pipes[e-eStart],2*pipes[e-eStart].nnodes);

    if (size == 1 && monipipes) { /* Add monitor -- show Q_{pipes[e-eStart].id}? */
      pipes[e-eStart].length = 600.0;
      DMNetworkMonitorAdd(monitor, "Pipe Q", e, pipes[e-eStart].nnodes, 0, 2, 0.0,pipes[e-eStart].length, -0.8, 0.8, PETSC_TRUE);
      DMNetworkMonitorAdd(monitor, "Pipe H", e, pipes[e-eStart].nnodes, 1, 2, 0.0,pipes[e-eStart].length, -400.0, 800.0, PETSC_TRUE);
    }
  }

  /* Add Junction component and number of variables to all local vertices, including ghost vertices! (current implementation requires setting the same number of variables at ghost points */
  for (v = vStart; v < vEnd; v++) {
    DMNetworkAddComponent(networkdm,v,KeyJunction,&junctions[v-vStart],2);
  }

  if (size > 1) {  /* must be called before DMSetUp()???. Other partitioners do not work yet??? -- cause crash in proc[0]! */
    DM               plexdm;
    PetscPartitioner part;
    DMNetworkGetPlex(networkdm,&plexdm);
    DMPlexGetPartitioner(plexdm, &part);
    PetscPartitionerSetType(part,PETSCPARTITIONERSIMPLE);
    PetscOptionsSetValue(NULL,"-dm_plex_csr_via_mat","true"); /* for parmetis */
  }

  /* Set up DM for use */
  DMSetUp(networkdm);
  if (viewdm) {
    PetscPrintf(PETSC_COMM_WORLD,"\nOriginal networkdm, DMView:\n");
    DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Set user physical parameters to the components: copy the endpoint
     junction types from vtype (built in WashNetworkCreate) into the
     junction components attached to the DM */
  for (e = eStart; e < eEnd; e++) {
    DMNetworkGetConnectedVertices(networkdm,e,&cone);
    /* vfrom */
    DMNetworkGetComponent(networkdm,cone[0],0,&vkey,(void**)&junction,NULL);
    junction->type = (VertexType)vtype[2*e];

    /* vto */
    DMNetworkGetComponent(networkdm,cone[1],0,&vkey,(void**)&junction,NULL);
    junction->type = (VertexType)vtype[2*e+1];
  }

  WashNetworkCleanUp(wash);

  /* Network partitioning and distribution of data */
  DMNetworkDistribute(&networkdm,0);
  if (viewdm) {
    PetscPrintf(PETSC_COMM_WORLD,"\nAfter DMNetworkDistribute, DMView:\n");
    DMView(networkdm,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* create vectors */
  DMCreateGlobalVector(networkdm,&X);
  DMCreateLocalVector(networkdm,&wash->localX);
  DMCreateLocalVector(networkdm,&wash->localXdot);

  /* PipeSetUp -- each process only sets its own pipes */
  /*---------------------------------------------------*/
  DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);

  userJac = PETSC_TRUE;
  DMNetworkHasJacobian(networkdm,userJac,userJac);
  DMNetworkGetEdgeRange(networkdm,&eStart,&eEnd);
  for (e=eStart; e<eEnd; e++) { /* each edge has only one component, pipe */
    DMNetworkGetComponent(networkdm,e,0,&type,(void**)&pipe,NULL);

    wash->nnodes_loc += pipe->nnodes; /* local total number of nodes, will be used by PipesView() */
    PipeSetParameters(pipe,
                      600.0,          /* length */
                      0.5,            /* diameter */
                      1200.0,         /* a */
                      0.018);         /* friction */
    PipeSetUp(pipe);

    if (userJac) {
      /* Create Jacobian matrix structures for a Pipe */
      Mat            *J;
      PipeCreateJacobian(pipe,NULL,&J);
      DMNetworkEdgeSetMatrix(networkdm,e,J);
    }
  }

  if (userJac) {
    DMNetworkGetVertexRange(networkdm,&vStart,&vEnd);
    for (v=vStart; v<vEnd; v++) {
      Mat            *J;
      JunctionCreateJacobian(networkdm,v,NULL,&J);
      DMNetworkVertexSetMatrix(networkdm,v,J);

      DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction,NULL);
      junction->jacobian = J;
    }
  }

  /* Setup solver                                           */
  /*--------------------------------------------------------*/
  TSCreate(PETSC_COMM_WORLD,&ts);

  TSSetDM(ts,(DM)networkdm);
  TSSetIFunction(ts,NULL,WASHIFunction,wash);

  TSSetMaxSteps(ts,steps);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetTimeStep(ts,0.1);
  TSSetType(ts,TSBEULER);
  if (size == 1 && monipipes) {
    TSMonitorSet(ts, TSDMNetworkMonitor, monitor, NULL);
  }
  TSSetFromOptions(ts);

  WASHSetInitialSolution(networkdm,X,wash);

  TSSolve(ts,X);

  TSGetSolveTime(ts,&ftime);
  TSGetStepNumber(ts,&steps);
  TSGetConvergedReason(ts,&reason);
  PetscPrintf(PETSC_COMM_WORLD,"%s at time %g after %D steps\n",TSConvergedReasons[reason],(double)ftime,steps);
  if (viewX) {
    VecView(X,PETSC_VIEWER_STDOUT_WORLD);
  }

  viewpipes = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL, "-Jac_view", &viewpipes,NULL);
  if (viewpipes) {
    SNES snes;
    Mat  Jac;
    TSGetSNES(ts,&snes);
    SNESGetJacobian(snes,&Jac,NULL,NULL,NULL);
    MatView(Jac,PETSC_VIEWER_DRAW_WORLD);
  }

  /* View solutions */
  /* -------------- */
  viewpipes = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL, "-pipe_view", &viewpipes,NULL);
  if (viewpipes) {
    PipesView(networkdm,KeyPipe,X);
  }

  /* Test IS */
  viewjuncs = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL, "-isjunc_view", &viewjuncs,NULL);
  if (viewjuncs) {
    ISJunctionsView(networkdm,KeyJunction);
  }

  /* Free spaces */
  /* ----------- */
  TSDestroy(&ts);
  VecDestroy(&X);
  VecDestroy(&wash->localX);
  VecDestroy(&wash->localXdot);

  /* Destroy objects from each pipe that are created in PipeSetUp() */
  DMNetworkGetEdgeRange(networkdm,&eStart, &eEnd);
  for (i = eStart; i < eEnd; i++) {
    DMNetworkGetComponent(networkdm,i,0,&key,(void**)&pipe,NULL);
    PipeDestroy(&pipe);
  }
  if (userJac) {
    for (v=vStart; v<vEnd; v++) {
      DMNetworkGetComponent(networkdm,v,0,&vkey,(void**)&junction,NULL);
      JunctionDestroyJacobian(networkdm,v,junction);
    }
  }

  if (size == 1 && monipipes) {
    DMNetworkMonitorDestroy(&monitor);
  }
  DMDestroy(&networkdm);
  PetscFree(wash);

  if (rank) { /* junctions/pipes on ranks != 0 were allocated in main() above */
    PetscFree2(junctions,pipes);
  }
  PetscFinalize();
  return ierr;
}
878: /*TEST
880: build:
881: depends: pipeInterface.c pipeImpls.c
882: requires: mumps
884: test:
885: args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
886: localrunfiles: pOption
887: output_file: output/pipes_1.out
889: test:
890: suffix: 2
891: nsize: 2
892: args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
893: localrunfiles: pOption
894: output_file: output/pipes_2.out
896: test:
897: suffix: 3
898: nsize: 2
899: args: -ts_monitor -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
900: localrunfiles: pOption
901: output_file: output/pipes_3.out
903: test:
904: suffix: 4
905: args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -options_left no -viewX
906: localrunfiles: pOption
907: output_file: output/pipes_4.out
909: test:
910: suffix: 5
911: nsize: 3
912: args: -ts_monitor -case 2 -ts_max_steps 10 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -viewX
913: localrunfiles: pOption
914: output_file: output/pipes_5.out
916: test:
917: suffix: 6
918: nsize: 2
919: args: -ts_monitor -case 1 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
920: localrunfiles: pOption
921: output_file: output/pipes_6.out
923: test:
924: suffix: 7
925: nsize: 2
926: args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -viewX
927: localrunfiles: pOption
928: output_file: output/pipes_7.out
930: test:
931: suffix: 8
932: nsize: 2
933: requires: parmetis
934: args: -ts_monitor -case 2 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type parmetis -options_left no -wash_distribute 1
935: localrunfiles: pOption
936: output_file: output/pipes_8.out
938: test:
939: suffix: 9
940: nsize: 2
941: args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -pipe_view
942: localrunfiles: pOption
943: output_file: output/pipes_9.out
945: test:
946: suffix: 10
947: nsize: 2
948: args: -case 0 -ts_max_steps 1 -pc_factor_mat_solver_type mumps -petscpartitioner_type simple -options_left no -wash_distribute 0 -isjunc_view
949: localrunfiles: pOption
950: output_file: output/pipes_10.out
952: TEST*/