Actual source code: ex5.c
static char help[] = "Demonstrates Pattern Formation with Reaction-Diffusion Equations.\n";

/*F
   This example is taken from the book, Numerical Solution of Time-Dependent Advection-Diffusion-Reaction Equations by
   W. Hundsdorfer and J.G. Verwer, Page 21, Pattern Formation with Reaction-Diffusion Equations
\begin{eqnarray*}
   u_t = D_1 (u_{xx} + u_{yy}) - u v^2 + \gamma(1 - u) \\
   v_t = D_2 (v_{xx} + v_{yy}) + u v^2 - (\gamma + \kappa) v
\end{eqnarray*}
   Unlike in the book, this example uses periodic boundary conditions instead of Neumann
   (since they are easier for finite differences).
F*/

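/*
   The right-hand side and Jacobian routines live in reaction_diffusion.c
   (not shown here). For orientation, a minimal sketch of the per-point
   update they are expected to perform, assuming the standard five-point
   stencil and the mesh spacings hx,hy used in InitialConditions() below:

     uxx       = (u[j][i-1].u - 2.0*u[j][i].u + u[j][i+1].u)/(hx*hx);
     uyy       = (u[j-1][i].u - 2.0*u[j][i].u + u[j+1][i].u)/(hy*hy);
     vsq       = u[j][i].v*u[j][i].v;
     f[j][i].u = appctx->D1*(uxx + uyy) - u[j][i].u*vsq + appctx->gamma*(1.0 - u[j][i].u);

   and analogously for f[j][i].v with D2 and the u v^2 - (gamma + kappa) v
   reaction terms. The periodic boundary conditions are handled by the DMDA
   ghost points, so no special boundary code is needed.
*/
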
/*
   Helpful runtime monitor options:
       -ts_monitor_draw_solution
       -draw_save -draw_save_movie

   Helpful runtime linear solver options:
       -pc_type mg -pc_mg_galerkin pmat -da_refine 1 -snes_monitor -ksp_monitor -ts_view   (note that these Jacobians are so well-conditioned that multigrid may not be the best solver)

   Point your browser to localhost:8080 to monitor the simulation:
       ./ex5 -ts_view_pre saws -stack_view saws -draw_save -draw_save_single_file -x_virtual -ts_monitor_draw_solution -saws_root .
*/

/*
   Include "petscdmda.h" so that we can use distributed arrays (DMDAs).
   Include "petscts.h" so that we can use the TS numerical (ODE) integrators. Note that this
   file automatically includes:
     petscsys.h    - base PETSc routines      petscvec.h  - vectors
     petscmat.h    - matrices                 petscis.h   - index sets
     petscksp.h    - Krylov subspace methods  petscpc.h   - preconditioners
     petscviewer.h - viewers                  petscsnes.h - nonlinear solvers
*/
#include "reaction_diffusion.h"
#include <petscdm.h>
#include <petscdmda.h>

/* ------------------------------------------------------------------- */
PetscErrorCode InitialConditions(DM da,Vec U)
{
  PetscInt  i,j,xs,ys,xm,ym,Mx,My;
  Field     **u;
  PetscReal hx,hy,x,y;

  DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);

  hx = 2.5/(PetscReal)Mx;
  hy = 2.5/(PetscReal)My;

  /*
     Get pointers to actual vector data
  */
  DMDAVecGetArray(da,U,&u);

  /*
     Get local grid boundaries
  */
  DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);
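  /*
     (xs,ys) is the global index of the lower-left corner of the patch owned
     by this MPI rank, and xm x ym is its extent; looping only over these
     indices keeps each rank writing strictly to its own portion of U.
  */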
  /*
     Compute function over the locally owned part of the grid
  */
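  /*
     The initial state perturbs the trivial steady state u = 1, v = 0 with a
     smooth bump in v confined to the square [1,1.5] x [1,1.5]; setting
     u = 1 - 2v localizes the perturbation in both fields.
  */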
  for (j=ys; j<ys+ym; j++) {
    y = j*hy;
    for (i=xs; i<xs+xm; i++) {
      x = i*hx;
      if (PetscApproximateGTE(x,1.0) && PetscApproximateLTE(x,1.5) && PetscApproximateGTE(y,1.0) && PetscApproximateLTE(y,1.5)) u[j][i].v = PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0)/4.0;
      else u[j][i].v = 0.0;

      u[j][i].u = 1.0 - 2.0*u[j][i].v;
    }
  }
  /*
     Restore access to vector
  */
  DMDAVecRestoreArray(da,U,&u);
  return 0;
}

int main(int argc,char **argv)
{
  TS             ts;      /* ODE integrator */
  Vec            x;       /* solution */
  DM             da;
  AppCtx         appctx;
  PetscErrorCode ierr;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  appctx.D1    = 8.0e-5;
  appctx.D2    = 4.0e-5;
  appctx.gamma = .024;
  appctx.kappa = .06;
  appctx.aijpc = PETSC_FALSE;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create distributed array (DMDA) to manage parallel grid and vectors
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
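  /*
     The grid is 65 x 65 with 2 degrees of freedom (u and v) per point and a
     star stencil of width 1, which is exactly what the five-point Laplacian
     needs; the periodic boundary types make the stencil wrap around the
     domain edges.
  */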
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);
  DMSetFromOptions(da);
  DMSetUp(da);
  DMDASetFieldName(da,0,"u");
  DMDASetFieldName(da,1,"v");

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create global vector from DMDA; this will be used to store the solution
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  DMCreateGlobalVector(da,&x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetType(ts,TSARKIMEX);
  TSARKIMEXSetFullyImplicit(ts,PETSC_TRUE);
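  /*
     TSARKIMEX is an additive Runge-Kutta IMEX integrator; requesting the
     fully implicit mode treats the entire right-hand side (diffusion and
     reaction) implicitly instead of splitting it into stiff and non-stiff
     parts, which is why a single RHS function and Jacobian suffice below.
  */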
  TSSetDM(ts,da);
  TSSetProblemType(ts,TS_NONLINEAR);
  TSSetRHSFunction(ts,NULL,RHSFunction,&appctx);
  TSSetRHSJacobian(ts,NULL,NULL,RHSJacobian,&appctx);
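  /*
     RHSFunction() and RHSJacobian() are implemented in reaction_diffusion.c;
     presumably reaction_diffusion.h declares them, along with the Field and
     AppCtx types, using the standard TS callback signatures:

       PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec U,Vec F,void *ctx);
       PetscErrorCode RHSJacobian(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
  */
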
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set initial conditions
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  InitialConditions(da,x);
  TSSetSolution(ts,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solver options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSetMaxTime(ts,2000.0);
  TSSetTimeStep(ts,.0001);
  TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);
  TSSetFromOptions(ts);
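  /*
     TS_EXACTFINALTIME_STEPOVER lets the final step run past the end time
     rather than being shortened to land on it exactly; note that the
     -ts_max_time option used by the tests below overrides the 2000.0 set
     here once TSSetFromOptions() has been called.
  */
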
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve ODE system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  TSSolve(ts,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  VecDestroy(&x);
  TSDestroy(&ts);
  DMDestroy(&da);

  ierr = PetscFinalize();
  return ierr;
}

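/*
   A typical build-and-run sequence, assuming the usual PETSc makefile setup
   for this tutorial directory (the process count is illustrative):

     make ex5
     mpiexec -n 4 ./ex5 -ts_max_time 500 -ts_monitor -ts_monitor_draw_solution
*/
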
/*TEST

   build:
     depends: reaction_diffusion.c

   test:
     args: -ts_view -ts_monitor -ts_max_time 500
     requires: double
     timeoutfactor: 3

   test:
     suffix: 2
     args: -ts_view -ts_monitor -ts_max_time 500 -ts_monitor_draw_solution
     requires: x double
     output_file: output/ex5_1.out
     timeoutfactor: 3

TEST*/