Actual source code: ex10.c


  2: /*
  3:    Include "petscsnes.h" so that we can use SNES solvers.  Note that this
  4:    file automatically includes:
  5:      petscsys.h    - base PETSc routines     petscvec.h - vectors
  6:      petscmat.h    - matrices
  7:      petscis.h     - index sets              petscksp.h - Krylov subspace methods
  8:      petscviewer.h - viewers                 petscpc.h  - preconditioners
  9:      petscksp.h    - linear solvers
 10: */
 11: #include <petscsnes.h>
 12: #include <petscao.h>

 14: static char help[] = "An Unstructured Grid Example.\n\
 15: This example demonstrates how to solve a nonlinear system in parallel\n\
 16: with SNES for an unstructured mesh. The mesh and partitioning information\n\
 17: is read in an application-defined ordering, which is later transformed\n\
 18: into another convenient ordering (called the local ordering). The local\n\
 19: ordering, apart from being efficient in CPU cycles and memory, allows\n\
 20: the use of the SPMD model of parallel programming. After partitioning\n\
 21: is done, scatters are created between local (sequential) and global\n\
 22: (distributed) vectors. Finally, we set up the nonlinear solver context\n\
 23: in the usual way as for a structured grid (see\n\
 24: petsc/src/snes/tutorials/ex5.c).\n\
 25: This example also illustrates the use of parallel matrix coloring.\n\
 26: The command line options include:\n\
 27:   -vert <Nv>, where Nv is the global number of nodes\n\
 28:   -elem <Ne>, where Ne is the global number of elements\n\
 29:   -nl_par <lambda>, where lambda is the multiplier for the nonlinear term (u*u)\n\
 30:   -lin_par <alpha>, where alpha is the multiplier for the linear term (u)\n\
 31:   -fd_jacobian_coloring -mat_coloring_type lf\n";

 33: /*T
 34:    Concepts: SNES^unstructured grid
 35:    Concepts: AO^application to PETSc ordering or vice versa;
 36:    Concepts: VecScatter^using vector scatter operations;
 37:    Processors: n
 38: T*/

 40: /* ------------------------------------------------------------------------

 42:    PDE Solved : L(u) + lambda*u*u + alpha*u = 0 where L(u) is the Laplacian.

 44:    The Laplacian is approximated in the following way: each edge is given a weight
 45:    of one, so the diagonal term of a row equals the degree of the corresponding
 46:    node and each off-diagonal term gets a weight of -1.

 48:    -----------------------------------------------------------------------*/
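/*
   For example, a vertex i with the three neighbors j1, j2 and j3 contributes the row

      L(u)_i = 3*u_i - u_j1 - u_j2 - u_j3

   i.e. the diagonal entry equals the degree (3) and each off-diagonal entry is -1.
*/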

 50: #define MAX_ELEM      500  /* Maximum number of elements */
 51: #define MAX_VERT      100  /* Maximum number of vertices */
 52: #define MAX_VERT_ELEM   3  /* Vertices per element       */

 54: /*
 55:   Application-defined context for problem specific data
 56: */
 57: typedef struct {
 58:   PetscInt   Nvglobal,Nvlocal;              /* global and local number of vertices */
 59:   PetscInt   Neglobal,Nelocal;              /* global and local number of elements */
 60:   PetscInt   AdjM[MAX_VERT][50];            /* adjacency list of a vertex */
 61:   PetscInt   itot[MAX_VERT];                /* total number of neighbors for a vertex */
 62:   PetscInt   icv[MAX_ELEM][MAX_VERT_ELEM];  /* vertices belonging to an element */
 63:   PetscInt   v2p[MAX_VERT];                 /* processor number for a vertex */
 64:   PetscInt   *locInd,*gloInd;               /* local and global orderings for a node */
 65:   Vec        localX,localF;                 /* local solution (u) and f(u) vectors */
 66:   PetscReal  non_lin_param;                 /* nonlinear parameter for the PDE */
 67:   PetscReal  lin_param;                     /* linear parameter for the PDE */
 68:   VecScatter scatter;                       /* scatter context for the local and
 69:                                                distributed vectors */
 70: } AppCtx;
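/*
  A small sketch of how the adjacency fields fit together (the numbers here are
  hypothetical, not taken from adj.in): for a local vertex i,

     itot[i]                  is the number of neighbors of i (at most 50, the
                              second dimension of AdjM), and
     AdjM[i][0..itot[i]-1]    holds the indices of those neighbors,

  so itot[i] = 3 with AdjM[i][0] = 1, AdjM[i][1] = 4, AdjM[i][2] = 5 describes a
  vertex with the three neighbors 1, 4 and 5.
*/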

 72: /*
 73:   User-defined routines
 74: */
 75: PetscErrorCode FormJacobian(SNES,Vec,Mat,Mat,void*);
 76: PetscErrorCode FormFunction(SNES,Vec,Vec,void*);
 77: PetscErrorCode FormInitialGuess(AppCtx*,Vec);

 79: int main(int argc,char **argv)
 80: {
 81:   SNES                   snes;                 /* SNES context */
 82:   SNESType               type = SNESNEWTONLS;  /* default nonlinear solution method */
 83:   Vec                    x,r;                  /* solution, residual vectors */
 84:   Mat                    Jac;                  /* Jacobian matrix */
 85:   AppCtx                 user;                 /* user-defined application context */
 86:   AO                     ao;                   /* Application Ordering object */
 87:   IS                     isglobal,islocal;     /* global and local index sets */
 88:   PetscMPIInt            rank,size;            /* rank of a process, number of processors */
 89:   PetscInt               rstart;               /* starting index of PETSc ordering for a processor */
 90:   PetscInt               nfails;               /* number of unsuccessful Newton steps */
 91:   PetscInt               bs = 1;               /* block size for multicomponent systems */
 92:   PetscInt               nvertices;            /* number of local plus ghost nodes of a processor */
 93:   PetscInt               *pordering;           /* PETSc ordering */
 94:   PetscInt               *vertices;            /* list of all vertices (incl. ghost ones) on a processor */
 95:   PetscInt               *verticesmask;
 96:   PetscInt               *tmp;
 97:   PetscInt               i,j,jstart,inode,nb,nbrs,Nvneighborstotal = 0;
 98:   PetscErrorCode         ierr;
 99:   PetscInt               its,N;
100:   PetscScalar            *xx;
101:   char                   str[256],form[256],part_name[256];
102:   FILE                   *fptr,*fptr1;
103:   ISLocalToGlobalMapping isl2g;
104:   int                    dtmp;
105: #if defined(UNUSED_VARIABLES)
106:   PetscDraw              draw;                 /* drawing context */
107:   PetscScalar            *ff,*gg;
108:   PetscReal              tiny = 1.0e-10,zero = 0.0,one = 1.0,big = 1.0e+10;
109:   PetscInt               *tmp1,*tmp2;
110: #endif
111:   MatFDColoring          matfdcoloring = 0;
112:   PetscBool              fd_jacobian_coloring = PETSC_FALSE;

114:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
115:      Initialize program
116:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

118:   ierr = PetscInitialize(&argc,&argv,"options.inf",help);if (ierr) return ierr;
119:   MPI_Comm_rank(MPI_COMM_WORLD,&rank);
120:   MPI_Comm_size(MPI_COMM_WORLD,&size);

122:   /* The current input file options.inf is for 2 proc run only */
123:   if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"This example currently runs on 2 procs only.");

125:   /*
126:      Initialize problem parameters
127:   */
128:   user.Nvglobal = 16;      /* Global # of vertices */
129:   user.Neglobal = 18;      /* Global # of elements */

131:   PetscOptionsGetInt(NULL,NULL,"-vert",&user.Nvglobal,NULL);
132:   PetscOptionsGetInt(NULL,NULL,"-elem",&user.Neglobal,NULL);

134:   user.non_lin_param = 0.06;

136:   PetscOptionsGetReal(NULL,NULL,"-nl_par",&user.non_lin_param,NULL);

138:   user.lin_param = -1.0;

140:   PetscOptionsGetReal(NULL,NULL,"-lin_par",&user.lin_param,NULL);

142:   user.Nvlocal = 0;
143:   user.Nelocal = 0;

145:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146:       Read the mesh and partitioning information
147:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

149:   /*
150:      Read the mesh and partitioning information from 'adj.in'.
151:      The file format is as follows.
152:      For each line the first entry is the processor rank where the
153:      current node belongs. The second entry is the number of
154:      neighbors of a node. The rest of the line is the adjacency
155:      list of a node. Currently this file is set up to work on two
156:      processors.

158:      This is not a very good example of reading input. In the future,
159:      we will add an example that shows the style that should be
160:      used in a real application, where the partitioning is done
161:      dynamically by calling partitioning routines (at present, PETSc
162:      provides a ready interface to ParMETIS).
163:    */
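  /*
     For illustration, a hypothetical line of adj.in such as

        0 3 1 4 5

     would mean: the vertex belongs to processor 0, it has 3 neighbors, and those
     neighbors are the vertices 1, 4 and 5 (in the application ordering).
  */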
164:   fptr = fopen("adj.in","r");
165:   if (!fptr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Could not open adj.in");

167:   /*
168:      Each processor writes to the file output.<rank> where rank is the
169:      processor's rank.
170:   */
171:   sprintf(part_name,"output.%d",rank);
172:   fptr1 = fopen(part_name,"w");
173:   if (!fptr1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Could not open output file");
174:   PetscMalloc1(user.Nvglobal,&user.gloInd);
175:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"Rank is %d\n",rank);
176:   for (inode = 0; inode < user.Nvglobal; inode++) {
177:     if (!fgets(str,256,fptr)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_READ,"fgets read failed");
178:     sscanf(str,"%d",&dtmp);user.v2p[inode] = dtmp;
179:     if (user.v2p[inode] == rank) {
180:       PetscFPrintf(PETSC_COMM_SELF,fptr1,"Node %D belongs to processor %D\n",inode,user.v2p[inode]);

182:       user.gloInd[user.Nvlocal] = inode;
183:       sscanf(str,"%*d %d",&dtmp);
184:       nbrs = dtmp;
185:       PetscFPrintf(PETSC_COMM_SELF,fptr1,"Number of neighbors for the vertex %D is %D\n",inode,nbrs);

187:       user.itot[user.Nvlocal] = nbrs;
188:       Nvneighborstotal       += nbrs;
189:       for (i = 0; i < user.itot[user.Nvlocal]; i++) {
190:         form[0]='\0';
191:         for (j=0; j < i+2; j++) {
192:           PetscStrlcat(form,"%*d ",sizeof(form));
193:         }
194:         PetscStrlcat(form,"%d",sizeof(form));
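        /*
           For example, when i == 1 the constructed format string is "%*d %*d %*d %d":
           it skips the processor rank, the neighbor count and the first neighbor,
           and reads the second neighbor of the current vertex from str.
        */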

196:         sscanf(str,form,&dtmp);
197:         user.AdjM[user.Nvlocal][i] = dtmp;

199:         PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[user.Nvlocal][i]);
200:       }
201:       PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
202:       user.Nvlocal++;
203:     }
204:   }
205:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"Total # of Local Vertices is %D \n",user.Nvlocal);

207:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208:      Create different orderings
209:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

211:   /*
212:     Create the local ordering list for vertices. First a list using the PETSc global
213:     ordering is created. Then we use the AO object to get the PETSc-to-application and
214:     application-to-PETSc mappings. Each vertex also gets a local index (stored in the
215:     locInd array).
216:   */
217:   MPI_Scan(&user.Nvlocal,&rstart,1,MPIU_INT,MPI_SUM,PETSC_COMM_WORLD);
218:   rstart -= user.Nvlocal;
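  /*
     MPI_Scan() with MPI_SUM gives each rank the inclusive prefix sum of Nvlocal;
     subtracting Nvlocal turns it into the exclusive prefix sum, so rstart is the
     first PETSc index owned by this processor. For instance, if Nvlocal were 8 on
     each of the two processors, rstart would be 0 on rank 0 and 8 on rank 1.
  */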
219:   PetscMalloc1(user.Nvlocal,&pordering);

221:   for (i=0; i < user.Nvlocal; i++) pordering[i] = rstart + i;

223:   /*
224:     Create the AO object
225:   */
226:   AOCreateBasic(MPI_COMM_WORLD,user.Nvlocal,user.gloInd,pordering,&ao);
227:   PetscFree(pordering);

229:   /*
230:     Keep the global indices for later use
231:   */
232:   PetscMalloc1(user.Nvlocal,&user.locInd);
233:   PetscMalloc1(Nvneighborstotal,&tmp);

235:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
236:     Demonstrate the use of AO functionality
237:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

239:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"Before AOApplicationToPetsc, local indices are : \n");
240:   for (i=0; i < user.Nvlocal; i++) {
241:     PetscFPrintf(PETSC_COMM_SELF,fptr1," %D ",user.gloInd[i]);

243:     user.locInd[i] = user.gloInd[i];
244:   }
245:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
246:   jstart = 0;
247:   for (i=0; i < user.Nvlocal; i++) {
248:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neighbors of local vertex %D are : ",user.gloInd[i]);
249:     for (j=0; j < user.itot[i]; j++) {
250:       PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);

252:       tmp[j + jstart] = user.AdjM[i][j];
253:     }
254:     jstart += user.itot[i];
255:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
256:   }

258:   /*
259:     Now map the local vertex list and the neighbor lists to the PETSc ordering
260:   */
261:   AOApplicationToPetsc(ao,user.Nvlocal,user.locInd);
262:   AOApplicationToPetsc(ao,Nvneighborstotal,tmp);
263:   AODestroy(&ao);

265:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"After AOApplicationToPetsc, local indices are : \n");
266:   for (i=0; i < user.Nvlocal; i++) {
267:     PetscFPrintf(PETSC_COMM_SELF,fptr1," %D ",user.locInd[i]);
268:   }
269:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");

271:   jstart = 0;
272:   for (i=0; i < user.Nvlocal; i++) {
273:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neighbors of local vertex %D are : ",user.locInd[i]);
274:     for (j=0; j < user.itot[i]; j++) {
275:       user.AdjM[i][j] = tmp[j+jstart];

277:       PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
278:     }
279:     jstart += user.itot[i];
280:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
281:   }

283:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
284:      Extract the ghost vertex information for each processor
285:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
286:   /*
287:    Next, we need to generate a list of vertices required for this processor
288:    and a local numbering scheme for all vertices required on this processor.
289:       vertices - integer array of all vertices needed on this processor in PETSc
290:                  global numbering; this list consists of first the "locally owned"
291:                  vertices followed by the ghost vertices.
292:       verticesmask - integer array that for each global vertex lists its local
293:                      vertex number (in vertices) + 1. If the global vertex is not
294:                      represented on this processor, then the corresponding
295:                      entry in verticesmask is zero

297:       Note: vertices and verticesmask are both Nvglobal in length; this may
298:     sound terribly non-scalable, but in fact is not so bad for a reasonable
299:     number of processors. Importantly, it allows us to use NO SEARCHING
300:     in setting up the data structures.
301:   */
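  /*
     A hypothetical illustration: if this processor owns the vertices 0 and 1 (in
     PETSc ordering) and vertex 5 appears only as a ghost neighbor, then

        vertices     = {0, 1, 5}                  (nvertices = 3)
        verticesmask = {1, 2, 0, 0, 0, 3, 0, ...}

     i.e. verticesmask[5] = 3 records that global vertex 5 is local vertex 2 (= 3-1).
  */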
302:   PetscMalloc1(user.Nvglobal,&vertices);
303:   PetscCalloc1(user.Nvglobal,&verticesmask);
304:   nvertices = 0;

306:   /*
307:     First load "owned vertices" into list
308:   */
309:   for (i=0; i < user.Nvlocal; i++) {
310:     vertices[nvertices++]        = user.locInd[i];
311:     verticesmask[user.locInd[i]] = nvertices;
312:   }

314:   /*
315:     Now load ghost vertices into list
316:   */
317:   for (i=0; i < user.Nvlocal; i++) {
318:     for (j=0; j < user.itot[i]; j++) {
319:       nb = user.AdjM[i][j];
320:       if (!verticesmask[nb]) {
321:         vertices[nvertices++] = nb;
322:         verticesmask[nb]      = nvertices;
323:       }
324:     }
325:   }

327:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
328:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"The array vertices is :\n");
329:   for (i=0; i < nvertices; i++) {
330:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",vertices[i]);
331:   }
332:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");

334:   /*
335:      Map the vertices in the adjacency lists from the global PETSc ordering
336:      (which they hold at this point) to the local contiguous numbering.
337:   */
338:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
339:   PetscFPrintf(PETSC_COMM_SELF,fptr1,"After mapping neighbors in the local contiguous ordering\n");
340:   for (i=0; i<user.Nvlocal; i++) {
341:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"Neighbors of local vertex %D are :\n",i);
342:     for (j = 0; j < user.itot[i]; j++) {
343:       nb              = user.AdjM[i][j];
344:       user.AdjM[i][j] = verticesmask[nb] - 1;

346:       PetscFPrintf(PETSC_COMM_SELF,fptr1,"%D ",user.AdjM[i][j]);
347:     }
348:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"\n");
349:   }

351:   N = user.Nvglobal;

353:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
354:      Create vector and matrix data structures
355:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

357:   /*
358:     Create vector data structures
359:   */
360:   VecCreate(MPI_COMM_WORLD,&x);
361:   VecSetSizes(x,user.Nvlocal,N);
362:   VecSetFromOptions(x);
363:   VecDuplicate(x,&r);
364:   VecCreateSeq(MPI_COMM_SELF,bs*nvertices,&user.localX);
365:   VecDuplicate(user.localX,&user.localF);

367:   /*
368:     Create the scatter between the global representation and the
369:     local representation
370:   */
371:   ISCreateStride(MPI_COMM_SELF,bs*nvertices,0,1,&islocal);
372:   ISCreateBlock(MPI_COMM_SELF,bs,nvertices,vertices,PETSC_COPY_VALUES,&isglobal);
373:   VecScatterCreate(x,isglobal,user.localX,islocal,&user.scatter);
374:   ISDestroy(&isglobal);
375:   ISDestroy(&islocal);
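  /*
     The scatter copies entry vertices[k] of the global vector x into position k of
     user.localX, so the locally owned values come first and the ghost values follow,
     matching the local numbering established above.
  */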

377:   /*
378:      Create matrix data structure. Just to keep the example simple, we have not done any
379:      preallocation of memory for the matrix. In real application code with big matrices,
380:      preallocation should always be done to expedite the matrix creation.
381:   */
382:   MatCreate(MPI_COMM_WORLD,&Jac);
383:   MatSetSizes(Jac,PETSC_DECIDE,PETSC_DECIDE,N,N);
384:   MatSetFromOptions(Jac);
385:   MatSetUp(Jac);

387:   /*
388:     The following routine allows us to set the matrix values in local ordering
389:   */
390:   ISLocalToGlobalMappingCreate(MPI_COMM_SELF,bs,nvertices,vertices,PETSC_COPY_VALUES,&isl2g);
391:   MatSetLocalToGlobalMapping(Jac,isl2g,isl2g);
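  /*
     With this mapping in place, MatSetValuesLocal() in FormJacobian() can address
     the matrix with local indices 0..nvertices-1; local index i is translated to
     global row/column vertices[i] during assembly.
  */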

393:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
394:      Create nonlinear solver context
395:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

397:   SNESCreate(MPI_COMM_WORLD,&snes);
398:   SNESSetType(snes,type);

400:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
401:     Set routines for function and Jacobian evaluation
402:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
403:   SNESSetFunction(snes,r,FormFunction,(void*)&user);

405:   PetscOptionsGetBool(NULL,NULL,"-fd_jacobian_coloring",&fd_jacobian_coloring,0);
406:   if (!fd_jacobian_coloring) {
407:     SNESSetJacobian(snes,Jac,Jac,FormJacobian,(void*)&user);
408:   } else {  /* Use matfdcoloring */
409:     ISColoring   iscoloring;
410:     MatColoring  mc;

412:     /* Get the data structure of Jac */
413:     FormJacobian(snes,x,Jac,Jac,&user);
414:     /* Create coloring context */
415:     MatColoringCreate(Jac,&mc);
416:     MatColoringSetType(mc,MATCOLORINGSL);
417:     MatColoringSetFromOptions(mc);
418:     MatColoringApply(mc,&iscoloring);
419:     MatColoringDestroy(&mc);
420:     MatFDColoringCreate(Jac,iscoloring,&matfdcoloring);
421:     MatFDColoringSetFunction(matfdcoloring,(PetscErrorCode (*)(void))FormFunction,&user);
422:     MatFDColoringSetFromOptions(matfdcoloring);
423:     MatFDColoringSetUp(Jac,iscoloring,matfdcoloring);
424:     /* MatFDColoringView(matfdcoloring,PETSC_VIEWER_STDOUT_WORLD); */
425:     SNESSetJacobian(snes,Jac,Jac,SNESComputeJacobianDefaultColor,matfdcoloring);
426:     ISColoringDestroy(&iscoloring);
427:   }
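  /*
     For example, the finite-difference coloring path is exercised (with adj.in and
     options.inf in the run directory) by a command line such as

        mpiexec -n 2 ./ex10 -fd_jacobian_coloring -snes_monitor_short

     while the hand-coded FormJacobian() is used when -fd_jacobian_coloring is absent;
     see also the test section at the end of this file.
  */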

429:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
430:      Customize nonlinear solver; set runtime options
431:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

433:   SNESSetFromOptions(snes);

435:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
436:      Evaluate initial guess; then solve nonlinear system
437:      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

439:   /*
440:      Note: The user should initialize the vector, x, with the initial guess
441:      for the nonlinear solver prior to calling SNESSolve().  In particular,
442:      to employ an initial guess of zero, the user should explicitly set
443:      this vector to zero by calling VecSet().
444:   */
445:   FormInitialGuess(&user,x);

447:   /*
448:     Print the initial guess
449:   */
450:   VecGetArray(x,&xx);
451:   for (inode = 0; inode < user.Nvlocal; inode++) {
452:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"Initial Solution at node %D is %f \n",inode,xx[inode]);
453:   }
454:   VecRestoreArray(x,&xx);

456:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
457:      Now solve the nonlinear system
458:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

460:   SNESSolve(snes,NULL,x);
461:   SNESGetIterationNumber(snes,&its);
462:   SNESGetNonlinearStepFailures(snes,&nfails);

464:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
465:     Print the output : solution vector and other information
466:      Each processor writes to the file output.<rank> where rank is the
467:      processor's rank.
468:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

470:   VecGetArray(x,&xx);
471:   for (inode = 0; inode < user.Nvlocal; inode++) {
472:     PetscFPrintf(PETSC_COMM_SELF,fptr1,"Solution at node %D is %f \n",inode,xx[inode]);
473:   }
474:   VecRestoreArray(x,&xx);
475:   fclose(fptr1);
476:   PetscPrintf(MPI_COMM_WORLD,"number of SNES iterations = %D, ",its);
477:   PetscPrintf(MPI_COMM_WORLD,"number of unsuccessful steps = %D\n",nfails);

479:   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
480:      Free work space.  All PETSc objects should be destroyed when they
481:      are no longer needed.
482:    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
483:   PetscFree(user.gloInd);
484:   PetscFree(user.locInd);
485:   PetscFree(vertices);
486:   PetscFree(verticesmask);
487:   PetscFree(tmp);
488:   VecScatterDestroy(&user.scatter);
489:   ISLocalToGlobalMappingDestroy(&isl2g);
490:   VecDestroy(&x);
491:   VecDestroy(&r);
492:   VecDestroy(&user.localX);
493:   VecDestroy(&user.localF);
494:   MatDestroy(&Jac);  SNESDestroy(&snes);
495:   /*PetscDrawDestroy(draw);*/
496:   if (fd_jacobian_coloring) {
497:     MatFDColoringDestroy(&matfdcoloring);
498:   }
499:   ierr = PetscFinalize();
500:   return ierr;
501: }
502: /* --------------------  Form initial approximation ----------------- */

504: /*
505:    FormInitialGuess - Forms initial approximation.

507:    Input Parameters:
508:    user - user-defined application context
509:    X - vector

511:    Output Parameter:
512:    X - vector
513:  */
514: PetscErrorCode FormInitialGuess(AppCtx *user,Vec X)
515: {
516:   PetscInt    i,Nvlocal,ierr;
517:   PetscInt    *gloInd;
518:   PetscScalar *x;
519: #if defined(UNUSED_VARIABLES)
520:   PetscReal temp1,temp,hx,hy,hxdhy,hydhx,sc;
521:   PetscInt  Neglobal,Nvglobal,j,row;
522:   PetscReal alpha,lambda;

524:   Nvglobal = user->Nvglobal;
525:   Neglobal = user->Neglobal;
526:   lambda   = user->non_lin_param;
527:   alpha    = user->lin_param;
528: #endif

530:   Nvlocal = user->Nvlocal;
531:   gloInd  = user->gloInd;

533:   /*
534:      Get a pointer to vector data.
535:        - For default PETSc vectors, VecGetArray() returns a pointer to
536:          the data array.  Otherwise, the routine is implementation dependent.
537:        - You MUST call VecRestoreArray() when you no longer need access to
538:          the array.
539:   */
540:   VecGetArray(X,&x);

542:   /*
543:      Compute initial guess over the locally owned part of the grid
544:   */
545:   for (i=0; i < Nvlocal; i++) x[i] = (PetscReal)gloInd[i];

547:   /*
548:      Restore vector
549:   */
550:   VecRestoreArray(X,&x);
551:   return 0;
552: }
553: /* --------------------  Evaluate Function F(x) --------------------- */
554: /*
555:    FormFunction - Evaluates nonlinear function, F(x).

557:    Input Parameters:
558: .  snes - the SNES context
559: .  X - input vector
560: .  ptr - optional user-defined context, as set by SNESSetFunction()

562:    Output Parameter:
563: .  F - function vector
564:  */
565: PetscErrorCode FormFunction(SNES snes,Vec X,Vec F,void *ptr)
566: {
567:   AppCtx         *user = (AppCtx*)ptr;
569:   PetscInt       i,j,Nvlocal;
570:   PetscReal      alpha,lambda;
571:   PetscScalar    *x,*f;
572:   VecScatter     scatter;
573:   Vec            localX = user->localX;
574: #if defined(UNUSED_VARIABLES)
575:   PetscScalar ut,ub,ul,ur,u,*g,sc,uyy,uxx;
576:   PetscReal   hx,hy,hxdhy,hydhx;
577:   PetscReal   two = 2.0,one = 1.0;
578:   PetscInt    Nvglobal,Neglobal,row;
579:   PetscInt    *gloInd;

581:   Nvglobal = user->Nvglobal;
582:   Neglobal = user->Neglobal;
583:   gloInd   = user->gloInd;
584: #endif

586:   Nvlocal = user->Nvlocal;
587:   lambda  = user->non_lin_param;
588:   alpha   = user->lin_param;
589:   scatter = user->scatter;

591:   /*
592:      PDE : L(u) + lambda*u*u + alpha*u = 0 where L(u) is the approximate Laplacian as
593:      described in the beginning of this code

595:      First scatter the distributed vector X into local vector localX (that includes
596:      values for ghost nodes). If we wish, we can put some other work between
597:      VecScatterBegin() and VecScatterEnd() to overlap the communication with
598:      computation.
599:  */
600:   VecScatterBegin(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
601:   VecScatterEnd(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);

603:   /*
604:      Get pointers to vector data
605:   */
606:   VecGetArray(localX,&x);
607:   VecGetArray(F,&f);

609:   /*
610:     Now compute f(x). As mentioned earlier, the computed Laplacian is just an
611:     approximate one chosen for illustrative purposes only. Another point to note
612:     is that this is a local (completely parallel) calculation. In practical application
613:     codes, the function evaluation is a dominant portion of the overall execution time.
614:   */
615:   for (i=0; i < Nvlocal; i++) {
616:     f[i] = (user->itot[i] - alpha)*x[i] - lambda*x[i]*x[i];
617:     for (j = 0; j < user->itot[i]; j++) f[i] -= x[user->AdjM[i][j]];
618:   }
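  /*
     For a vertex i with, say, three neighbors n0, n1 and n2 the loop above yields

        f[i] = (3 - alpha)*x[i] - lambda*x[i]*x[i] - x[n0] - x[n1] - x[n2]

     which combines the approximate Laplacian with the linear and nonlinear terms.
  */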

620:   /*
621:      Restore vectors
622:   */
623:   VecRestoreArray(localX,&x);
624:   VecRestoreArray(F,&f);
625:   /*VecView(F,PETSC_VIEWER_STDOUT_WORLD);*/

627:   return 0;
628: }

630: /* --------------------  Evaluate Jacobian F'(x) -------------------- */
631: /*
632:    FormJacobian - Evaluates Jacobian matrix.

634:    Input Parameters:
635: .  snes - the SNES context
636: .  X - input vector
637: .  ptr - optional user-defined context, as set by SNESSetJacobian()

639:    Output Parameters:
640: .  A - Jacobian matrix
641: .  B - optionally different preconditioning matrix
642: .  flag - flag indicating matrix structure

644: */
645: PetscErrorCode FormJacobian(SNES snes,Vec X,Mat J,Mat jac,void *ptr)
646: {
647:   AppCtx      *user = (AppCtx*)ptr;
648:   PetscInt    i,j,Nvlocal,col[50],ierr;
649:   PetscScalar alpha,lambda,value[50];
650:   Vec         localX = user->localX;
651:   VecScatter  scatter;
652:   PetscScalar *x;
653: #if defined(UNUSED_VARIABLES)
654:   PetscScalar two = 2.0,one = 1.0;
655:   PetscInt    row,Nvglobal,Neglobal;
656:   PetscInt    *gloInd;

658:   Nvglobal = user->Nvglobal;
659:   Neglobal = user->Neglobal;
660:   gloInd   = user->gloInd;
661: #endif

663:   /*printf("Entering into FormJacobian \n");*/
664:   Nvlocal = user->Nvlocal;
665:   lambda  = user->non_lin_param;
666:   alpha   =  user->lin_param;
667:   scatter = user->scatter;

669:   /*
670:      PDE : L(u) + lambda*u*u + alpha*u = 0 where L(u) is the approximate Laplacian as
671:      described in the beginning of this code

673:      First scatter the distributed vector X into local vector localX (that includes
674:      values for ghost nodes). If we wish, we can put some other work between
675:      VecScatterBegin() and VecScatterEnd() to overlap the communication with
676:      computation.
677:   */
678:   VecScatterBegin(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);
679:   VecScatterEnd(scatter,X,localX,INSERT_VALUES,SCATTER_FORWARD);

681:   /*
682:      Get pointer to vector data
683:   */
684:   VecGetArray(localX,&x);

686:   for (i=0; i < Nvlocal; i++) {
687:     col[0]   = i;
688:     value[0] = user->itot[i] - 2.0*lambda*x[i] - alpha;
689:     for (j = 0; j < user->itot[i]; j++) {
690:       col[j+1]   = user->AdjM[i][j];
691:       value[j+1] = -1.0;
692:     }

694:     /*
695:       Set the matrix values in the local ordering. Note that in order to use this
696:       feature we must call the routine MatSetLocalToGlobalMapping() after the
697:       matrix has been created.
698:     */
699:     MatSetValuesLocal(jac,1,&i,1+user->itot[i],col,value,INSERT_VALUES);
700:   }
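  /*
     These entries are the derivatives of the residual computed in FormFunction():
     for a vertex i with three neighbors n0, n1 and n2,

        df[i]/dx[i]  = itot[i] - 2*lambda*x[i] - alpha = 3 - 2*lambda*x[i] - alpha,
        df[i]/dx[n0] = df[i]/dx[n1] = df[i]/dx[n2] = -1.
  */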

702:   /*
703:      Assemble matrix, using the 2-step process:
704:        MatAssemblyBegin(), MatAssemblyEnd().
705:      Between these two calls, the pointer to the vector data is restored in order to
706:      demonstrate the use of overlapping communication with computation.
707:   */
708:   MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
709:   VecRestoreArray(localX,&x);
710:   MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);

712:   /*
713:      Tell the matrix we will never add a new nonzero location to the
714:      matrix. If we do, it will generate an error.
715:   */
716:   MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);
717:   /* MatView(jac,PETSC_VIEWER_STDOUT_SELF); */
718:   return 0;
719: }

721: /*TEST

723:    build:
724:       requires: !complex

726:    test:
727:       nsize: 2
728:       args: -snes_monitor_short
729:       localrunfiles: options.inf adj.in

731:    test:
732:       suffix: 2
733:       nsize: 2
734:       args: -snes_monitor_short -fd_jacobian_coloring
735:       localrunfiles: options.inf adj.in

737: TEST*/