Actual source code: ex21.c

petsc-3.4.2 2013-07-02
static char help[] = "Solves a time-dependent nonlinear PDE with lower and upper bounds on the interior grid points. Uses implicit\n\
timestepping.  Runtime options include:\n\
  -M <xg>, where <xg> = number of grid points\n\
  -debug : Activate debugging printouts\n\
  -nox   : Deactivate x-window graphics\n\
  -ul    : lower bound\n\
  -uh    : upper bound\n\n";
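
/*
   Example invocation (a sketch, not part of the original source; the option
   values are illustrative, but -M, -ul, -uh, -debug and -nox come from the
   help string above, and -ts_max_steps is a standard TS runtime option):

       mpiexec -n 2 ./ex21 -M 40 -ul 0.0 -uh 3.0 -ts_max_steps 10 -nox
*/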

/*
   Concepts: TS^time-dependent nonlinear problems
   Concepts: TS^Variational inequality nonlinear solver
   Processors: n
*/

/* ------------------------------------------------------------------------

   This is a variation of ex2.c that solves the PDE

               u * u_xx
         u_t = ---------
               2*(t+1)^2

    with box constraints on the interior grid points
    ul <= u(t,x) <= uh with x != 0,1
    on the domain 0 <= x <= 1, with boundary conditions
         u(t,0) = t + 1,  u(t,1) = 2*t + 2,
    and initial condition
         u(0,x) = 1 + x*x.

    The exact solution is:
         u(t,x) = (1 + x*x) * (1 + t)

    By default we use the backward Euler method.

  ------------------------------------------------------------------------- */
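
/*
   Sanity check (added for clarity, not in the original source): with
   u = (1 + x*x)*(1 + t) we have u_t = 1 + x*x and u_xx = 2*(1 + t), so

       u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
                              = 1 + x*x = u_t,

   and the boundary and initial conditions above are satisfied as well.
*/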

/*
   Include "petscts.h" to use the PETSc timestepping routines. Note that
   this file automatically includes "petscsys.h" and other lower-level
   PETSc include files.

   Include "petscdmda.h" so that we can use the distributed array data
   structures to manage the parallel grid.
*/
#include <petscts.h>
#include <petscdmda.h>
#include <petscdraw.h>

/*
   User-defined application context - contains data needed by the
   application-provided callback routines.
*/
typedef struct {
  MPI_Comm  comm;           /* communicator */
  DM        da;             /* distributed array data structure */
  Vec       localwork;      /* local ghosted work vector */
  Vec       u_local;        /* local ghosted approximate solution vector */
  Vec       solution;       /* global exact solution vector */
  PetscInt  m;              /* total number of grid points */
  PetscReal h;              /* mesh width: h = 1/(m-1) */
  PetscBool debug;          /* PETSC_TRUE activates debugging printouts */
} AppCtx;

/*
   User-defined routines, provided below.
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSFunction(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode RHSJacobian(TS,PetscReal,Vec,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
extern PetscErrorCode SetBounds(Vec,Vec,PetscScalar,PetscScalar,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx         appctx;                 /* user-defined application context */
  TS             ts;                     /* timestepping context */
  Mat            A;                      /* Jacobian matrix data structure */
  Vec            u;                      /* approximate solution vector */
  Vec            r;                      /* residual vector */
  PetscInt       time_steps_max = 1000;  /* default max timesteps */
  PetscReal      dt;
  PetscReal      time_total_max = 100.0; /* default max total time */
  Vec            xl,xu;                  /* lower and upper bounds on variables */
  PetscScalar    ul = 0.0,uh = 3.0;
  PetscBool      mymonitor;
  PetscReal      bounds[] = {1.0, 3.3};

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawSetBounds(PETSC_VIEWER_DRAW_(PETSC_COMM_WORLD),1,bounds);

  appctx.comm = PETSC_COMM_WORLD;
  appctx.m    = 60;
  PetscOptionsGetInt(NULL,"-M",&appctx.m,NULL);
  PetscOptionsGetScalar(NULL,"-ul",&ul,NULL);
  PetscOptionsGetScalar(NULL,"-uh",&uh,NULL);
  PetscOptionsHasName(NULL,"-debug",&appctx.debug);
  PetscOptionsHasName(NULL,"-mymonitor",&mymonitor);
  appctx.h    = 1.0/(appctx.m-1.0);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DMDA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,appctx.m,1,1,NULL,&appctx.da);

  /*
     Extract global and local vectors from DMDA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DMCreateGlobalVector(appctx.da,&u);
  DMCreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);

  /* Create residual vector */
  VecDuplicate(u,&r);
  /* Create lower and upper bound vectors */
  VecDuplicate(u,&xl);
  VecDuplicate(u,&xu);
  SetBounds(xl,xu,ul,uh,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context; set callback routine for
     right-hand-side function evaluation.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,r,RHSFunction,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  if (mymonitor) {
    TSMonitorSet(ts,Monitor,&appctx,NULL);
  }

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     For nonlinear problems, the user can provide a Jacobian evaluation
     routine (or use a finite differencing approximation).

     Create matrix data structure; set Jacobian evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,appctx.m,appctx.m);
  MatSetFromOptions(A);
  MatSetUp(A);  /* finish matrix setup so MatSetValues() can be called in RHSJacobian() */
  TSSetRHSJacobian(ts,A,A,RHSJacobian,&appctx);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_final_time <maxtime>
     to override the defaults set by TSSetDuration().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetType(ts,TSBEULER);
  TSSetDuration(ts,time_steps_max,time_total_max);
  /* Set lower and upper bound on the solution vector for each time step */
  TSVISetVariableBounds(ts,xl,xu);
  TSSetFromOptions(ts);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSSolve(ts,u);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  VecDestroy(&r);
  VecDestroy(&xl);
  VecDestroy(&xu);
  TSDestroy(&ts);
  VecDestroy(&u);
  MatDestroy(&A);
  DMDestroy(&appctx.da);
  VecDestroy(&appctx.localwork);
  VecDestroy(&appctx.solution);
  VecDestroy(&appctx.u_local);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar    *u_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
    Get a pointer to vector data.
    - For default PETSc vectors, VecGetArray() returns a pointer to
      the data array.  Otherwise, the routine is implementation dependent.
    - You MUST call VecRestoreArray() when you no longer need access to
      the array.
    - Note that the Fortran interface to VecGetArray() differs from the
      C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i; /* current location in global grid */
    u_localptr[i-mybase] = 1.0 + x*x;
  }

  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}

/* --------------------------------------------------------------------- */
/*
  SetBounds - Sets the lower and upper bounds on the interior points

  Input parameters:
  xl - vector of lower bounds
  xu - vector of upper bounds
  ul - constant lower bound for all variables
  uh - constant upper bound for all variables
  appctx - Application context
 */
PetscErrorCode SetBounds(Vec xl, Vec xu, PetscScalar ul, PetscScalar uh,AppCtx *appctx)
{
  PetscScalar    *l,*u;
  PetscMPIInt    rank,size;
  PetscInt       localsize;

  VecSet(xl,ul);
  VecSet(xu,uh);
  VecGetLocalSize(xl,&localsize);
  VecGetArray(xl,&l);
  VecGetArray(xu,&u);

  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
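  /*
     The first and last grid points carry the Dirichlet boundary data, so
     leave them unconstrained by setting their bounds to -/+ infinity.
  */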
  if (!rank) {
    l[0] = -SNES_VI_INF;
    u[0] =  SNES_VI_INF;
  }
  if (rank == size-1) {
    l[localsize-1] = -SNES_VI_INF;
    u[localsize-1] =  SNES_VI_INF;
  }
  VecRestoreArray(xl,&l);
  VecRestoreArray(xu,&u);
  return 0;
}

/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx         *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal      en2,en2s,enmax;
  PetscDraw      draw;

  /*
     We use the default X windows viewer
             PETSC_VIEWER_DRAW_(appctx->comm)
     that is associated with the current communicator. This saves
     the effort of calling PetscViewerDrawOpen() to create the window.
     Note that if we wished to plot several items in separate windows we
     would create each viewer with PetscViewerDrawOpen() and store them in
     the application context, appctx.

     Double buffering makes graphics look better.
  */
  PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_(appctx->comm),0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  VecView(u,PETSC_VIEWER_DRAW_(appctx->comm));

  /*
     Compute the exact solution at this timestep
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&en2);
  en2s = PetscSqrtReal(appctx->h)*en2;  /* scale the 2-norm by the grid spacing */
  VecNorm(appctx->solution,NORM_MAX,&enmax);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %G, 2-norm error = %G, max norm error = %G\n",step,time,en2s,enmax);

  /*
     Print debugging information if desired
  */
  /*
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }
  */
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSFunction - User-provided routine that evaluates the right-hand-side
   function of the ODE.  This routine is set in the main program by
   calling TSSetRHSFunction().  We compute:
          global_out = F(global_in)

   Input Parameters:
   ts         - timestepping context
   t          - current time
   global_in  - vector containing the current iterate
   ctx        - (optional) user-provided context for function evaluation.
                In this case we use the appctx defined above.

   Output Parameter:
   global_out - vector containing the newly evaluated function
*/
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec global_in,Vec global_out,void *ctx)
{
  AppCtx         *appctx   = (AppCtx*) ctx;     /* user-defined application context */
  DM             da        = appctx->da;        /* distributed array */
  Vec            local_in  = appctx->u_local;   /* local ghosted input vector */
  Vec            localwork = appctx->localwork; /* local ghosted work vector */
  PetscInt       i,localsize;
  PetscMPIInt    rank,size;
  PetscScalar    *copyptr,*localptr,sc;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local function computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
      Access directly the values in our local INPUT work array
  */
  VecGetArray(local_in,&localptr);

  /*
      Access directly the values in our local OUTPUT work array
  */
  VecGetArray(localwork,&copyptr);

  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));

  /*
      Evaluate our function on the nodes owned by this processor
  */
  VecGetLocalSize(local_in,&localsize);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Handle boundary conditions: This is done by using the boundary condition
        u(t,boundary) = g(t,boundary)
     for some function g. Now take the derivative with respect to t to obtain
        u_{t}(t,boundary) = g_{t}(t,boundary)

     In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
             and  u(t,1) = 2t + 2, so that u_{t}(t,1) = 2
  */
  MPI_Comm_rank(appctx->comm,&rank);
  MPI_Comm_size(appctx->comm,&size);
  if (!rank) copyptr[0] = 1.0;
  if (rank == size-1) copyptr[localsize-1] = (t < .5) ? 2.0 : 0.0;
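  /*
     Note the switch above at t = 0.5 (our reading; the original comments do
     not explain it): the right-boundary forcing drops from 2 to 0, so the
     boundary value u(t,1) = 2 + 2t stops growing once it reaches 3.0, which
     is the default upper bound uh.
  */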

  /*
     Handle the interior nodes where the PDE is replaced by finite
     difference operators.
  */
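  /*
     For reference (added for clarity): the stencil below is the standard
     second-order central difference u_xx(x_i) ~ (u[i+1] - 2*u[i] + u[i-1])/h^2,
     giving F_i = u_i*(u[i+1] - 2*u[i] + u[i-1])*sc with sc = 1/(2*h^2*(1+t)^2).
  */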
  for (i=1; i<localsize-1; i++) copyptr[i] = localptr[i] * sc * (localptr[i+1] + localptr[i-1] - 2.0*localptr[i]);

  /*
     Restore vectors
  */
  VecRestoreArray(local_in,&localptr);
  VecRestoreArray(localwork,&copyptr);

  /*
     Insert values from the local OUTPUT vector into the global
     output vector
  */
  DMLocalToGlobalBegin(da,localwork,INSERT_VALUES,global_out);
  DMLocalToGlobalEnd(da,localwork,INSERT_VALUES,global_out);

  /* Print debugging information if desired */
  /*
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"RHS function vector\n");
    VecView(global_out,PETSC_VIEWER_STDOUT_WORLD);
  }
  */

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSJacobian - User-provided routine to compute the Jacobian of
   the nonlinear right-hand-side function of the ODE.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

  Notes:
  RHSJacobian computes entries for the locally owned part of the Jacobian.
   - Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.
   - Each processor needs to insert only elements that it owns
     locally (but any non-local elements will be sent to the
     appropriate processor during matrix assembly).
   - Always specify global rows and columns of matrix entries when
     using MatSetValues().
   - Here, we set all entries for a particular row at once.
   - Note that MatSetValues() uses 0-based row and column numbers
     in Fortran as well as in C.
*/
PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec global_in,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat            B        = *BB;                /* Jacobian matrix */
  AppCtx         *appctx  = (AppCtx*)ctx;       /* user-defined application context */
  Vec            local_in = appctx->u_local;    /* local ghosted input vector */
  DM             da       = appctx->da;         /* distributed array */
  PetscScalar    v[3],*localptr,sc;
  PetscInt       i,mstart,mend,mstarts,mends,idx[3],is;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Get ready for local Jacobian computations
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DMGlobalToLocalBegin(da,global_in,INSERT_VALUES,local_in);
  DMGlobalToLocalEnd(da,global_in,INSERT_VALUES,local_in);

  /*
     Get pointer to vector data
  */
  VecGetArray(local_in,&localptr);

  /*
     Get starting and ending locally owned rows of the matrix
  */
  MatGetOwnershipRange(B,&mstarts,&mends);
  mstart = mstarts; mend = mends;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Here, we set all entries for a particular row at once.
      - We can set matrix entries using either MatSetValuesLocal()
        or MatSetValues().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set matrix rows corresponding to boundary data
  */
  if (mstart == 0) {
    v[0] = 0.0;
    MatSetValues(B,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }
  if (mend == appctx->m) {
    mend--;
    v[0] = 0.0;
    MatSetValues(B,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
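  /*
     For reference (added for clarity): differentiating
     F_i = sc*u_i*(u[i+1] + u[i-1] - 2*u_i) gives the three nonzeros in row i,

        dF_i/du[i-1] = sc*u_i
        dF_i/du[i]   = sc*(u[i+1] + u[i-1] - 4*u_i)
        dF_i/du[i+1] = sc*u_i,

     which is why v[1] below carries a -4.0 rather than the -2.0 seen in
     RHSFunction().
  */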
  sc = 1.0/(appctx->h*appctx->h*2.0*(1.0+t)*(1.0+t));
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    is     = i - mstart + 1;
    v[0]   = sc*localptr[is];
    v[1]   = sc*(localptr[is+1] + localptr[is-1] - 4.0*localptr[is]);
    v[2]   = sc*localptr[is];
    MatSetValues(B,1,&i,3,idx,v,INSERT_VALUES);
  }

  /*
     Restore vector
  */
  VecRestoreArray(local_in,&localptr);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change). Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;

  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix. If we do, it will generate an error.
  */
  MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}