"Fossies" - the Fresh Open Source Software Archive

Member "petsc-3.12.3/src/ts/examples/tutorials/autodiff/adr_ex5adj.cxx" (29 Sep 2019, 40290 Bytes) of package /linux/misc/petsc-3.12.3.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) C and C++ source code syntax highlighting (style: standard) with prefixed line numbers and code folding option. Alternatively you can here view or download the uninterpreted source code file. See also the last Fossies "Diffs" side-by-side code changes report for "adr_ex5adj.cxx": 3.11.4_vs_3.12.0.

    1 static char help[] = "Demonstrates adjoint sensitivity analysis for Reaction-Diffusion Equations.\n";
    2 
    3 /*
    4    Concepts: TS^time-dependent nonlinear problems
    5    Concepts: Automatic differentiation using ADOL-C
    6 */
    7 /*
    8    REQUIRES configuration of PETSc with option --download-adolc.
    9 
   10    For documentation on ADOL-C, see
   11      $PETSC_ARCH/externalpackages/ADOL-C-2.6.0/ADOL-C/doc/adolc-manual.pdf
   12 
   13    REQUIRES configuration of PETSc with option --download-colpack
   14 
   15    For documentation on ColPack, see
   16      $PETSC_ARCH/externalpackages/git.colpack/README.md
   17 */
   18 /* ------------------------------------------------------------------------
   19   See ../advection-diffusion-reaction/ex5 for a description of the problem
   20   ------------------------------------------------------------------------- */
   21 
   22 /*
   23   Runtime options:
   24 
   25     Solver:
   26       -forwardonly       - Run the forward simulation without adjoint.
   27       -implicitform      - Provide IFunction and IJacobian to TS; if not set, RHSFunction and RHSJacobian are used.
   28       -aijpc             - Set the preconditioner matrix to be aij (the Jacobian matrix can be of a different type such as ELL).
   29 
   30     Jacobian generation:
   31       -jacobian_by_hand  - Use the hand-coded Jacobian of ex5.c, rather than generating it automatically.
   32       -no_annotation     - Do not annotate ADOL-C active variables. (Should be used alongside -jacobian_by_hand.)
   33       -adolc_sparse      - Calculate Jacobian in compressed format, using ADOL-C sparse functionality.
   34       -adolc_sparse_view - Print sparsity pattern used by -adolc_sparse option.
   35  */
   36 /*
   37   NOTE: If the -adolc_sparse option is used, at least four processes are required, so that no single process owns two
   38         boundaries that are identified with one another by the periodic boundary conditions. The number of grid points in
   39         the x- and y-directions should each be a multiple of 5, so that the 5-point stencil parallelises cleanly.
   40 */
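      /*
        For illustration, one invocation consistent with the note above (four MPI
        processes, a 15x15 grid; the executable name is assumed to follow the usual
        PETSc tutorial naming) would be:

          mpiexec -n 4 ./adr_ex5adj -da_grid_x 15 -da_grid_y 15 -adolc_sparse -adolc_sparse_view
      */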
   41 
   42 #include <petscdmda.h>
   43 #include <petscts.h>
   44 #include "adolc-utils/drivers.cxx"
   45 #include <adolc/adolc.h>
   46 
   47 /* (Passive) field for the two variables */
   48 typedef struct {
   49   PetscScalar u,v;
   50 } Field;
   51 
   52 /* Active field for the two variables */
   53 typedef struct {
   54   adouble u,v;
   55 } AField;
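      /*
        adouble is ADOL-C's "active" scalar type: arithmetic performed on adouble
        variables between trace_on() and trace_off() is recorded on a tape, from
        which derivatives are subsequently evaluated by the drivers included above.
      */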
   56 
   57 /* Application context */
   58 typedef struct {
   59   PetscReal D1,D2,gamma,kappa;
   60   AField    **u_a,**f_a;
   61   PetscBool aijpc;
   62   AdolcCtx  *adctx; /* Automatic differentiation support */
   63 } AppCtx;
   64 
   65 extern PetscErrorCode InitialConditions(DM da,Vec U);
   66 extern PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y);
   67 extern PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr);
   68 extern PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr);
   69 extern PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
   70 extern PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr);
   71 extern PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
   72 extern PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx);
   73 extern PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
   74 extern PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx);
   75 
   76 int main(int argc,char **argv)
   77 {
   78   TS             ts;                        /* ODE integrator */
   79   Vec            x,r,xdot;                  /* solution, residual, derivative */
   80   PetscErrorCode ierr;
   81   DM             da;
   82   AppCtx         appctx;
   83   AdolcCtx       *adctx;
   84   Vec            lambda[1];
   85   PetscBool      forwardonly=PETSC_FALSE,implicitform=PETSC_FALSE,byhand=PETSC_FALSE;
   86   PetscInt       gxm,gym,i,dofs = 2,ctrl[3] = {0,0,0};
   87   PetscScalar    **Seed = NULL,**Rec = NULL,*u_vec;
   88   unsigned int   **JP = NULL;
   89   ISColoring     iscoloring;
   90 
   91   ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
   92   ierr = PetscNew(&adctx);CHKERRQ(ierr);
   93   ierr = PetscOptionsGetBool(NULL,NULL,"-forwardonly",&forwardonly,NULL);CHKERRQ(ierr);
   94   ierr = PetscOptionsGetBool(NULL,NULL,"-implicitform",&implicitform,NULL);CHKERRQ(ierr);
   95   appctx.aijpc = PETSC_FALSE;adctx->no_an = PETSC_FALSE;adctx->sparse = PETSC_FALSE;adctx->sparse_view = PETSC_FALSE;adctx->sparse_view_done = PETSC_FALSE;
   96   ierr = PetscOptionsGetBool(NULL,NULL,"-aijpc",&appctx.aijpc,NULL);CHKERRQ(ierr);
   97   ierr = PetscOptionsGetBool(NULL,NULL,"-no_annotation",&adctx->no_an,NULL);CHKERRQ(ierr);
   98   ierr = PetscOptionsGetBool(NULL,NULL,"-jacobian_by_hand",&byhand,NULL);CHKERRQ(ierr);
   99   ierr = PetscOptionsGetBool(NULL,NULL,"-adolc_sparse",&adctx->sparse,NULL);CHKERRQ(ierr);
  100   ierr = PetscOptionsGetBool(NULL,NULL,"-adolc_sparse_view",&adctx->sparse_view,NULL);CHKERRQ(ierr);
  101   appctx.D1    = 8.0e-5;
  102   appctx.D2    = 4.0e-5;
  103   appctx.gamma = .024;
  104   appctx.kappa = .06;
  105   appctx.adctx = adctx;
  106 
  107   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  108      Create distributed array (DMDA) to manage parallel grid and vectors
  109   - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  110   ierr = DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_PERIODIC,DM_BOUNDARY_PERIODIC,DMDA_STENCIL_STAR,65,65,PETSC_DECIDE,PETSC_DECIDE,2,1,NULL,NULL,&da);CHKERRQ(ierr);
  111   ierr = DMSetFromOptions(da);CHKERRQ(ierr);
  112   ierr = DMSetUp(da);CHKERRQ(ierr);
  113   ierr = DMDASetFieldName(da,0,"u");CHKERRQ(ierr);
  114   ierr = DMDASetFieldName(da,1,"v");CHKERRQ(ierr);
  115 
  116   /*  - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  117      Extract global vectors from DMDA; then duplicate for remaining
  118      vectors that are the same types
  119    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  120   ierr = DMCreateGlobalVector(da,&x);CHKERRQ(ierr);
  121   ierr = VecDuplicate(x,&r);CHKERRQ(ierr);
  122   ierr = VecDuplicate(x,&xdot);CHKERRQ(ierr);
  123 
  124   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  125      Create timestepping solver context
  126      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  127   ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
  128   ierr = TSSetType(ts,TSCN);CHKERRQ(ierr);
  129   ierr = TSSetDM(ts,da);CHKERRQ(ierr);
  130   ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
  131   if (!implicitform) {
  132     ierr = TSSetRHSFunction(ts,NULL,RHSFunctionPassive,&appctx);CHKERRQ(ierr);
  133   } else {
  134     ierr = DMDATSSetIFunctionLocal(da,INSERT_VALUES,(DMDATSIFunctionLocal)IFunctionLocalPassive,&appctx);CHKERRQ(ierr);
  135   }
  136 
  137   if (!adctx->no_an) {
  138     ierr = DMDAGetGhostCorners(da,NULL,NULL,NULL,&gxm,&gym,NULL);CHKERRQ(ierr);
  139     adctx->m = dofs*gxm*gym;adctx->n = dofs*gxm*gym; /* Number of dependent and independent variables */
  140 
  141     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  142        Trace function(s) just once
  143      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  145     if (!implicitform) {
  146       ierr = RHSFunctionActive(ts,1.0,x,r,&appctx);CHKERRQ(ierr);
  147     } else {
  148       ierr = IFunctionActive(ts,1.0,x,xdot,r,&appctx);CHKERRQ(ierr);
  149     }
  150 
  151     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  152       In the case where ADOL-C generates the Jacobian in compressed format,
  153       seed and recovery matrices are required. Since the sparsity structure
  154       of the Jacobian does not change over the course of the time
  155       integration, we can save computational effort by only generating
  156       these objects once.
  157        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
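          /*
            In outline, this is the standard column-compression technique (a sketch;
            the concrete interfaces are those of the drivers in adolc-utils):

              1. obtain the sparsity pattern JP of the m x n Jacobian J;
              2. colour the columns so that no two columns of the same colour share a
                 nonzero row, giving p colours and an n x p seed matrix S with
                 S(j,c) = 1 iff column j has colour c;
              3. propagate S through the forward mode of AD, yielding the compressed
                 Jacobian C = J*S (m x p);
              4. recover J(i,j) = C(i,colour(j)) for each nonzero (i,j), which is the
                 information encoded by the recovery matrix Rec.
          */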
  158     if (adctx->sparse) {
  159 
  160       /*
  161          Generate sparsity pattern
  162 
  163          One way to do this is to use the Jacobian pattern driver `jac_pat`
  164          provided by ColPack. Otherwise, it is the responsibility of the user
  165          to obtain the sparsity pattern.
  166       */
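            /*
              With ADOL-C's jac_pat driver, JP is returned in a compressed row format:
              JP[i][0] holds the number of nonzeros in row i and JP[i][1..JP[i][0]]
              hold their column indices (see the ADOL-C manual referenced above);
              this is presumably the layout consumed by GetRecoveryMatrix and
              PrintSparsity below.
            */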
  167       ierr = PetscMalloc1(adctx->n,&u_vec);CHKERRQ(ierr);
  168       JP = (unsigned int **) malloc(adctx->m*sizeof(unsigned int*));
  169       jac_pat(1,adctx->m,adctx->n,u_vec,JP,ctrl);
  170       if (adctx->sparse_view) {
  171         ierr = PrintSparsity(MPI_COMM_WORLD,adctx->m,JP);CHKERRQ(ierr);
  172       }
  173 
  174       /*
  175         Extract a column colouring
  176 
  177         For problems using DMDA, colourings can be extracted directly, as
  178         follows. There are also ColPack drivers available. Otherwise, it is the
  179         responsibility of the user to obtain a suitable colouring.
  180       */
  181       ierr = DMCreateColoring(da,IS_COLORING_LOCAL,&iscoloring);CHKERRQ(ierr);
  182       ierr = ISColoringGetIS(iscoloring,PETSC_USE_POINTER,&adctx->p,NULL);CHKERRQ(ierr);
  183 
  184       /* Generate seed matrix to propagate through the forward mode of AD */
  185       ierr = AdolcMalloc2(adctx->n,adctx->p,&Seed);CHKERRQ(ierr);
  186       ierr = GenerateSeedMatrix(iscoloring,Seed);CHKERRQ(ierr);
  187       ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);
  188 
  189       /*
  190         Generate recovery matrix, which is used to recover the Jacobian from
  191         compressed format */
  192       ierr = AdolcMalloc2(adctx->m,adctx->p,&Rec);CHKERRQ(ierr);
  193       ierr = GetRecoveryMatrix(Seed,JP,adctx->m,adctx->p,Rec);CHKERRQ(ierr);
  194 
  195       /* Store results and free workspace */
  196       adctx->Rec = Rec;
  197       for (i=0;i<adctx->m;i++)
  198         free(JP[i]);
  199       free(JP);
  200       ierr = PetscFree(u_vec);CHKERRQ(ierr);
  201 
  202     } else {
  203 
  204       /*
  205         In 'full' Jacobian mode, propagate an identity matrix through the
  206         forward mode of AD.
  207       */
  208       adctx->p = adctx->n;
  209       ierr = AdolcMalloc2(adctx->n,adctx->p,&Seed);CHKERRQ(ierr);
  210       ierr = Identity(adctx->n,Seed);CHKERRQ(ierr);
  211     }
  212     adctx->Seed = Seed;
  213   }
  214 
  215   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  216      Set Jacobian
  217    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  218   if (!implicitform) {
  219     if (!byhand) {
  220       ierr = TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianAdolc,&appctx);CHKERRQ(ierr);
  221     } else {
  222       ierr = TSSetRHSJacobian(ts,NULL,NULL,RHSJacobianByHand,&appctx);CHKERRQ(ierr);
  223     }
  224   } else {
  225     if (appctx.aijpc) {
  226       Mat A,B;
  227 
  228       ierr = DMSetMatType(da,MATSELL);CHKERRQ(ierr);
  229       ierr = DMCreateMatrix(da,&A);CHKERRQ(ierr);
  230       ierr = MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
  231       /* FIXME do we need to change viewer to display matrix in natural ordering as DMCreateMatrix_DA does? */
  232       if (!byhand) {
  233         ierr = TSSetIJacobian(ts,A,B,IJacobianAdolc,&appctx);CHKERRQ(ierr);
  234       } else {
  235         ierr = TSSetIJacobian(ts,A,B,IJacobianByHand,&appctx);CHKERRQ(ierr);
  236       }
  237       ierr = MatDestroy(&A);CHKERRQ(ierr);
  238       ierr = MatDestroy(&B);CHKERRQ(ierr);
  239     } else {
  240       if (!byhand) {
  241         ierr = TSSetIJacobian(ts,NULL,NULL,IJacobianAdolc,&appctx);CHKERRQ(ierr);
  242       } else {
  243         ierr = TSSetIJacobian(ts,NULL,NULL,IJacobianByHand,&appctx);CHKERRQ(ierr);
  244       }
  245     }
  246   }
  247 
  248   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  249      Set initial conditions
  250    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  251   ierr = InitialConditions(da,x);CHKERRQ(ierr);
  252   ierr = TSSetSolution(ts,x);CHKERRQ(ierr);
  253 
  254   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  255     Have the TS save its trajectory so that TSAdjointSolve() may be used
  256     and set solver options
  257    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  258   if (!forwardonly) {
  259     ierr = TSSetSaveTrajectory(ts);CHKERRQ(ierr);
  260     ierr = TSSetMaxTime(ts,200.0);CHKERRQ(ierr);
  261     ierr = TSSetTimeStep(ts,0.5);CHKERRQ(ierr);
  262   } else {
  263     ierr = TSSetMaxTime(ts,2000.0);CHKERRQ(ierr);
  264     ierr = TSSetTimeStep(ts,10);CHKERRQ(ierr);
  265   }
  266   ierr = TSSetExactFinalTime(ts,TS_EXACTFINALTIME_STEPOVER);CHKERRQ(ierr);
  267   ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
  268 
  269   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  270      Solve ODE system
  271      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  272   ierr = TSSolve(ts,x);CHKERRQ(ierr);
  273   if (!forwardonly) {
  274     /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  275        Start the Adjoint model
  276        - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  277     ierr = VecDuplicate(x,&lambda[0]);CHKERRQ(ierr);
  278     /*   Reset initial conditions for the adjoint integration */
  279     ierr = InitializeLambda(da,lambda[0],0.5,0.5);CHKERRQ(ierr);
  280     ierr = TSSetCostGradients(ts,1,lambda,NULL);CHKERRQ(ierr);
  281     ierr = TSAdjointSolve(ts);CHKERRQ(ierr);
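          /*
            On return from TSAdjointSolve(), lambda[0] holds the gradient of the cost
            functional implied by InitializeLambda() -- here u + v sampled at the grid
            point nearest (0.5,0.5) at the final time -- with respect to the initial
            conditions. It could be inspected with, for instance (not enabled here):

              ierr = VecView(lambda[0],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
          */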
  282     ierr = VecDestroy(&lambda[0]);CHKERRQ(ierr);
  283   }
  284 
  285   /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  286      Free work space.
  287    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  288   ierr = VecDestroy(&xdot);CHKERRQ(ierr);
  289   ierr = VecDestroy(&r);CHKERRQ(ierr);
  290   ierr = VecDestroy(&x);CHKERRQ(ierr);
  291   ierr = TSDestroy(&ts);CHKERRQ(ierr);
  292   if (!adctx->no_an) {
  293     if (adctx->sparse) {
  294       ierr = AdolcFree2(Rec);CHKERRQ(ierr);
            }
  295     ierr = AdolcFree2(Seed);CHKERRQ(ierr);
  296   }
  297   ierr = DMDestroy(&da);CHKERRQ(ierr);
  298   ierr = PetscFree(adctx);CHKERRQ(ierr);
  299   ierr = PetscFinalize();
  300   return ierr;
  301 }
  302 
  303 PetscErrorCode InitialConditions(DM da,Vec U)
  304 {
  305   PetscErrorCode ierr;
  306   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
  307   Field          **u;
  308   PetscReal      hx,hy,x,y;
  309 
  310   PetscFunctionBegin;
  311   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  312 
  313   hx = 2.5/(PetscReal)Mx;
  314   hy = 2.5/(PetscReal)My;
  315 
  316   /*
  317      Get pointers to vector data
  318   */
  319   ierr = DMDAVecGetArray(da,U,&u);CHKERRQ(ierr);
  320 
  321   /*
  322      Get local grid boundaries
  323   */
  324   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  325 
  326   /*
  327      Compute function over the locally owned part of the grid
  328   */
  329   for (j=ys; j<ys+ym; j++) {
  330     y = j*hy;
  331     for (i=xs; i<xs+xm; i++) {
  332       x = i*hx;
  333       if ((1.0 <= x) && (x <= 1.5) && (1.0 <= y) && (y <= 1.5)) u[j][i].v = .25*PetscPowReal(PetscSinReal(4.0*PETSC_PI*x),2.0)*PetscPowReal(PetscSinReal(4.0*PETSC_PI*y),2.0);
  334       else u[j][i].v = 0.0;
  335 
  336       u[j][i].u = 1.0 - 2.0*u[j][i].v;
  337     }
  338   }
  339 
  340   /*
  341      Restore vectors
  342   */
  343   ierr = DMDAVecRestoreArray(da,U,&u);CHKERRQ(ierr);
  344   PetscFunctionReturn(0);
  345 }
  346 
  347 PetscErrorCode InitializeLambda(DM da,Vec lambda,PetscReal x,PetscReal y)
  348 {
  349    PetscInt i,j,Mx,My,xs,ys,xm,ym;
  350    PetscErrorCode ierr;
  351    Field **l;
  352    PetscFunctionBegin;
  353 
  354    ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  355    /* locate the global i index for x and j index for y */
  356    i = (PetscInt)(x*(Mx-1));
  357    j = (PetscInt)(y*(My-1));
  358    ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  359 
  360    if (xs <= i && i < xs+xm && ys <= j && j < ys+ym) {
  361      /* the i,j vertex is on this process */
  362      ierr = DMDAVecGetArray(da,lambda,&l);CHKERRQ(ierr);
  363      l[j][i].u = 1.0;
  364      l[j][i].v = 1.0;
  365      ierr = DMDAVecRestoreArray(da,lambda,&l);CHKERRQ(ierr);
  366    }
  367    PetscFunctionReturn(0);
  368 }
  369 
  370 PetscErrorCode IFunctionLocalPassive(DMDALocalInfo *info,PetscReal t,Field**u,Field**udot,Field**f,void *ptr)
  371 {
  372   AppCtx         *appctx = (AppCtx*)ptr;
  373   PetscInt       i,j,xs,ys,xm,ym;
  374   PetscReal      hx,hy,sx,sy;
  375   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
  376   PetscErrorCode ierr;
  377 
  378   PetscFunctionBegin;
  379   hx = 2.50/(PetscReal)(info->mx); sx = 1.0/(hx*hx);
  380   hy = 2.50/(PetscReal)(info->my); sy = 1.0/(hy*hy);
  381 
  382   /* Get local grid boundaries */
  383   xs = info->xs; xm = info->xm; ys = info->ys; ym = info->ym;
  384 
  385   /* Compute function over the locally owned part of the grid */
  386   for (j=ys; j<ys+ym; j++) {
  387     for (i=xs; i<xs+xm; i++) {
  388       uc        = u[j][i].u;
  389       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
  390       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
  391       vc        = u[j][i].v;
  392       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
  393       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
  394       f[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
  395       f[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
  396     }
  397   }
  398   ierr = PetscLogFlops(16*xm*ym);CHKERRQ(ierr);
  399   PetscFunctionReturn(0);
  400 }
  401 
  402 PetscErrorCode IFunctionActive(TS ts,PetscReal ftime,Vec U,Vec Udot,Vec F,void *ptr)
  403 {
  404   PetscErrorCode ierr;
  405   AppCtx         *appctx = (AppCtx*)ptr;
  406   DM             da;
  407   DMDALocalInfo  info;
  408   Field          **u,**f,**udot;
  409   Vec            localU;
  410   PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym;
  411   PetscReal      hx,hy,sx,sy;
  412   adouble        uc,uxx,uyy,vc,vxx,vyy;
  413   AField         **f_a,*f_c,**u_a,*u_c;
  414   PetscScalar    dummy;
  415 
  416   PetscFunctionBegin;
  417 
  418   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  419   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  420   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
  421   hx = 2.50/(PetscReal)(info.mx); sx = 1.0/(hx*hx);
  422   hy = 2.50/(PetscReal)(info.my); sy = 1.0/(hy*hy);
  423   xs = info.xs; xm = info.xm; gxs = info.gxs; gxm = info.gxm;
  424   ys = info.ys; ym = info.ym; gys = info.gys; gym = info.gym;
  425 
  426   /*
  427      Scatter ghost points to local vector, using the 2-step process
  428         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
  429      By placing code between these two statements, computations can be
  430      done while messages are in transition.
  431   */
  432   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  433   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  434 
  435   /*
  436      Get pointers to vector data
  437   */
  438   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
  439   ierr = DMDAVecGetArray(da,F,&f);CHKERRQ(ierr);
  440   ierr = DMDAVecGetArrayRead(da,Udot,&udot);CHKERRQ(ierr);
  441 
  442   /*
  443     Create contiguous 1-arrays of AFields
  444 
  445     NOTE: Memory for ADOL-C active variables (such as adouble and AField)
  446           cannot be allocated using PetscMalloc, as this does not call the
  447           relevant class constructor. Instead, we use the C++ keyword `new`.
  448   */
  449   u_c = new AField[info.gxm*info.gym];
  450   f_c = new AField[info.gxm*info.gym];
  451 
  452   /* Create corresponding 2-arrays of AFields */
  453   u_a = new AField*[info.gym];
  454   f_a = new AField*[info.gym];
  455 
  456   /* Align indices between array types to endow 2d array with ghost points */
  457   ierr = GiveGhostPoints(da,u_c,&u_a);CHKERRQ(ierr);
  458   ierr = GiveGhostPoints(da,f_c,&f_a);CHKERRQ(ierr);
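        /*
          GiveGhostPoints (defined in adolc-utils) is assumed to set the row pointers
          so that the 2d arrays can be addressed with global (ghosted) indices, in
          the style of

            for (j=0; j<gym; j++) u_a[j] = u_c + j*gxm - gxs;
            u_a -= gys;

          which is why the pointers are shifted back by gys before being deleted at
          the end of this function.
        */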
  459 
  460   trace_on(1);  /* Start of active section on tape 1 */
  461 
  462   /*
  463     Mark independence
  464 
  465     NOTE: Ghost points are marked as independent, in place of the points they represent on
  466           other processors / on other boundaries.
  467   */
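        /*
          ADOL-C overloads <<= to declare an independent variable (copying in its
          passive value) and >>= to declare a dependent variable (copying out its
          value), so this loop and the "Mark dependence" loop below delimit the
          inputs and outputs of the taped computation.
        */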
  468   for (j=gys; j<gys+gym; j++) {
  469     for (i=gxs; i<gxs+gxm; i++) {
  470       u_a[j][i].u <<= u[j][i].u;
  471       u_a[j][i].v <<= u[j][i].v;
  472     }
  473   }
  474 
  475   /* Compute function over the locally owned part of the grid */
  476   for (j=ys; j<ys+ym; j++) {
  477     for (i=xs; i<xs+xm; i++) {
  478       uc        = u_a[j][i].u;
  479       uxx       = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
  480       uyy       = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
  481       vc        = u_a[j][i].v;
  482       vxx       = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
  483       vyy       = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
  484       f_a[j][i].u = udot[j][i].u - appctx->D1*(uxx + uyy) + uc*vc*vc - appctx->gamma*(1.0 - uc);
  485       f_a[j][i].v = udot[j][i].v - appctx->D2*(vxx + vyy) - uc*vc*vc + (appctx->gamma + appctx->kappa)*vc;
  486     }
  487   }
  488 
  489   /*
  490     Mark dependence
  491 
  492     NOTE: Marking dependence of dummy variables makes the index notation much simpler when forming
  493           the Jacobian later.
  494   */
  495   for (j=gys; j<gys+gym; j++) {
  496     for (i=gxs; i<gxs+gxm; i++) {
  497       if ((i < xs) || (i >= xs+xm) || (j < ys) || (j >= ys+ym)) {
  498         f_a[j][i].u >>= dummy;
  499         f_a[j][i].v >>= dummy;
  500       } else {
  501         f_a[j][i].u >>= f[j][i].u;
  502         f_a[j][i].v >>= f[j][i].v;
  503       }
  504     }
  505   }
  506   trace_off();  /* End of active section */
  507   ierr = PetscLogFlops(16*xm*ym);CHKERRQ(ierr);
  508 
  509   /* Restore vectors */
  510   ierr = DMDAVecRestoreArray(da,F,&f);CHKERRQ(ierr);
  511   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
  512   ierr = DMDAVecRestoreArrayRead(da,Udot,&udot);CHKERRQ(ierr);
  513 
  514   /* Destroy AFields appropriately */
  515   f_a += info.gys;
  516   u_a += info.gys;
  517   delete[] f_a;
  518   delete[] u_a;
  519   delete[] f_c;
  520   delete[] u_c;
  521 
  522   PetscFunctionReturn(0);
  523 }
  524 
  525 PetscErrorCode RHSFunctionPassive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
  526 {
  527   AppCtx         *appctx = (AppCtx*)ptr;
  528   DM             da;
  529   PetscErrorCode ierr;
  530   PetscInt       i,j,xs,ys,xm,ym,Mx,My;
  531   PetscReal      hx,hy,sx,sy;
  532   PetscScalar    uc,uxx,uyy,vc,vxx,vyy;
  533   Field          **u,**f;
  534   Vec            localU,localF;
  535 
  536   PetscFunctionBegin;
  537   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  538   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  539   hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
  540   hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
  541   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
  542   ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  543 
  544   /*
  545      Scatter ghost points to local vector, using the 2-step process
  546         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
  547      By placing code between these two statements, computations can be
  548      done while messages are in transition.
  549   */
  550   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  551   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  552   ierr = VecZeroEntries(F);CHKERRQ(ierr); // NOTE (1): See (2) below
  553   ierr = DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF);CHKERRQ(ierr);
  554   ierr = DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF);CHKERRQ(ierr);
  555 
  556   /*
  557      Get pointers to vector data
  558   */
  559   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
  560   ierr = DMDAVecGetArray(da,localF,&f);CHKERRQ(ierr);
  561 
  562   /*
  563      Get local grid boundaries
  564   */
  565   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  566 
  567   /*
  568      Compute function over the locally owned part of the grid
  569   */
  570   for (j=ys; j<ys+ym; j++) {
  571     for (i=xs; i<xs+xm; i++) {
  572       uc        = u[j][i].u;
  573       uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
  574       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
  575       vc        = u[j][i].v;
  576       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
  577       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
  578       f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
  579       f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
  580     }
  581   }
  582 
  583   /*
  584      Gather global vector, using the 2-step process
  585         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
  586 
  587      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or 
  588                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  589   */
  590   ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  591   ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  592 
  593   /*
  594      Restore vectors
  595   */
  596   ierr = DMDAVecRestoreArray(da,localF,&f);CHKERRQ(ierr);
  597   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
  598   ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  599   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
  600   ierr = PetscLogFlops(16*xm*ym);CHKERRQ(ierr);
  601   PetscFunctionReturn(0);
  602 }
  603 
  604 PetscErrorCode RHSFunctionActive(TS ts,PetscReal ftime,Vec U,Vec F,void *ptr)
  605 {
  606   AppCtx         *appctx = (AppCtx*)ptr;
  607   DM             da;
  608   PetscErrorCode ierr;
  609   PetscInt       i,j,xs,ys,xm,ym,gxs,gys,gxm,gym,Mx,My;
  610   PetscReal      hx,hy,sx,sy;
  611   AField         **f_a,*f_c,**u_a,*u_c;
  612   adouble        uc,uxx,uyy,vc,vxx,vyy;
  613   Field          **u,**f;
  614   Vec            localU,localF;
  615 
  616   PetscFunctionBegin;
  617   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  618   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  619   hx = 2.50/(PetscReal)(Mx);sx = 1.0/(hx*hx);
  620   hy = 2.50/(PetscReal)(My);sy = 1.0/(hy*hy);
  621   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
  622   ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  623 
  624   /*
  625      Scatter ghost points to local vector, using the 2-step process
  626         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
  627      By placing code between these two statements, computations can be
  628      done while messages are in transition.
  629   */
  630   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  631   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  632   ierr = VecZeroEntries(F);CHKERRQ(ierr); // NOTE (1): See (2) below
  633   ierr = DMGlobalToLocalBegin(da,F,INSERT_VALUES,localF);CHKERRQ(ierr);
  634   ierr = DMGlobalToLocalEnd(da,F,INSERT_VALUES,localF);CHKERRQ(ierr);
  635 
  636   /*
  637      Get pointers to vector data
  638   */
  639   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
  640   ierr = DMDAVecGetArray(da,localF,&f);CHKERRQ(ierr);
  641 
  642   /*
  643      Get local and ghosted grid boundaries
  644   */
  645   ierr = DMDAGetGhostCorners(da,&gxs,&gys,NULL,&gxm,&gym,NULL);CHKERRQ(ierr);
  646   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  647 
  648   /*
  649     Create contiguous 1-arrays of AFields
  650 
  651     NOTE: Memory for ADOL-C active variables (such as adouble and AField)
  652           cannot be allocated using PetscMalloc, as this does not call the
  653           relevant class constructor. Instead, we use the C++ keyword `new`.
  654   */
  655   u_c = new AField[gxm*gym];
  656   f_c = new AField[gxm*gym];
  657 
  658   /* Create corresponding 2-arrays of AFields */
  659   u_a = new AField*[gym];
  660   f_a = new AField*[gym];
  661 
  662   /* Align indices between array types to endow 2d array with ghost points */
  663   ierr = GiveGhostPoints(da,u_c,&u_a);CHKERRQ(ierr);
  664   ierr = GiveGhostPoints(da,f_c,&f_a);CHKERRQ(ierr);
  665 
  666   /*
  667      Compute function over the locally owned part of the grid
  668   */
  669   trace_on(1);  // ----------------------------------------------- Start of active section
  670 
  671   /*
  672     Mark independence
  673 
  674     NOTE: Ghost points are marked as independent, in place of the points they represent on
  675           other processors / on other boundaries.
  676   */
  677   for (j=gys; j<gys+gym; j++) {
  678     for (i=gxs; i<gxs+gxm; i++) {
  679       u_a[j][i].u <<= u[j][i].u;
  680       u_a[j][i].v <<= u[j][i].v;
  681     }
  682   }
  683 
  684   /*
  685     Compute function over the locally owned part of the grid
  686   */
  687   for (j=ys; j<ys+ym; j++) {
  688     for (i=xs; i<xs+xm; i++) {
  689       uc          = u_a[j][i].u;
  690       uxx         = (-2.0*uc + u_a[j][i-1].u + u_a[j][i+1].u)*sx;
  691       uyy         = (-2.0*uc + u_a[j-1][i].u + u_a[j+1][i].u)*sy;
  692       vc          = u_a[j][i].v;
  693       vxx         = (-2.0*vc + u_a[j][i-1].v + u_a[j][i+1].v)*sx;
  694       vyy         = (-2.0*vc + u_a[j-1][i].v + u_a[j+1][i].v)*sy;
  695       f_a[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);
  696       f_a[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc;
  697     }
  698   }
  699   /*
  700     Mark dependence
  701 
  702     NOTE: Ghost points are marked as dependent in order to vastly simplify index notation
  703           during Jacobian assembly.
  704   */
  705   for (j=gys; j<gys+gym; j++) {
  706     for (i=gxs; i<gxs+gxm; i++) {
  707       f_a[j][i].u >>= f[j][i].u;
  708       f_a[j][i].v >>= f[j][i].v;
  709     }
  710   }
  711   trace_off();  // ----------------------------------------------- End of active section
  712 
  713   /* Test zeroth order scalar evaluation in ADOL-C gives the same result */
  714 //  if (appctx->adctx->zos) {
  715 //    ierr = TestZOS2d(da,f,u,appctx);CHKERRQ(ierr); // FIXME: Why does this give nonzero?
  716 //  }
  717 
  718   /*
  719      Gather global vector, using the 2-step process
  720         DMLocalToGlobalBegin(),DMLocalToGlobalEnd().
  721 
  722      NOTE (2): We need to use ADD_VALUES if boundaries are not of type DM_BOUNDARY_NONE or 
  723                DM_BOUNDARY_GHOSTED, meaning we should also zero F before addition (see (1) above).
  724   */
  725   ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  726   ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  727 
  728   /*
  729      Restore vectors
  730   */
  731   ierr = DMDAVecRestoreArray(da,localF,&f);CHKERRQ(ierr);
  732   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
  733   ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  734   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
  735   ierr = PetscLogFlops(16*xm*ym);CHKERRQ(ierr);
  736 
  737   /* Destroy AFields appropriately */
  738   f_a += gys;
  739   u_a += gys;
  740   delete[] f_a;
  741   delete[] u_a;
  742   delete[] f_c;
  743   delete[] u_c;
  744 
  745   PetscFunctionReturn(0);
  746 }
  747 
  748 PetscErrorCode IJacobianAdolc(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
  749 {
  750   AppCtx         *appctx = (AppCtx*)ctx;
  751   DM             da;
  752   PetscErrorCode ierr;
  753   PetscScalar    *u_vec;
  754   Vec            localU;
  755 
  756   PetscFunctionBegin;
  757   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  758   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
  759 
  760   /*
  761      Scatter ghost points to local vector, using the 2-step process
  762         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
  763      By placing code between these two statements, computations can be
  764      done while messages are in transition.
  765   */
  766   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  767   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  768 
  769   /* Get pointers to vector data */
  770   ierr = VecGetArray(localU,&u_vec);CHKERRQ(ierr);
  771 
  772   /*
  773     Compute Jacobian
  774   */
  775   ierr = PetscAdolcComputeIJacobianLocalIDMass(1,A,u_vec,a,appctx->adctx);CHKERRQ(ierr);
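        /*
          This driver (from adolc-utils/drivers.cxx) is assumed to evaluate the
          Jacobian of the residual taped on tape 1 -- using the Seed/Rec matrices
          stored in the AdolcCtx when -adolc_sparse is set -- and to add the shift
          a times the identity, accounting for the Udot dependence of the IFunction
          (hence no Udot argument is required here).
        */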
  776 
  777   /*
  778      Restore vectors
  779   */
  780   ierr = VecRestoreArray(localU,&u_vec);CHKERRQ(ierr);
  781   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
  782   PetscFunctionReturn(0);
  783 }
  784 
  785 PetscErrorCode IJacobianByHand(TS ts,PetscReal t,Vec U,Vec Udot,PetscReal a,Mat A,Mat B,void *ctx)
  786 {
  787   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  788   DM             da;
  789   PetscErrorCode ierr;
  790   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  791   PetscReal      hx,hy,sx,sy;
  792   PetscScalar    uc,vc;
  793   Field          **u;
  794   Vec            localU;
  795   MatStencil     stencil[6],rowstencil;
  796   PetscScalar    entries[6];
  797 
  798   PetscFunctionBegin;
  799   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  800   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
  801   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  802 
  803   hx = 2.50/(PetscReal)Mx; sx = 1.0/(hx*hx);
  804   hy = 2.50/(PetscReal)My; sy = 1.0/(hy*hy);
  805 
  806   /*
  807      Scatter ghost points to local vector, using the 2-step process
  808         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
  809      By placing code between these two statements, computations can be
  810      done while messages are in transition.
  811   */
  812   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  813   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  814 
  815   /*
  816      Get pointers to vector data
  817   */
  818   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
  819 
  820   /*
  821      Get local grid boundaries
  822   */
  823   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  824 
  825   stencil[0].k = 0;
  826   stencil[1].k = 0;
  827   stencil[2].k = 0;
  828   stencil[3].k = 0;
  829   stencil[4].k = 0;
  830   stencil[5].k = 0;
  831   rowstencil.k = 0;
  832   /*
  833      Compute function over the locally owned part of the grid
  834   */
  835   for (j=ys; j<ys+ym; j++) {
  836 
  837     stencil[0].j = j-1;
  838     stencil[1].j = j+1;
  839     stencil[2].j = j;
  840     stencil[3].j = j;
  841     stencil[4].j = j;
  842     stencil[5].j = j;
  843     rowstencil.k = 0; rowstencil.j = j;
  844     for (i=xs; i<xs+xm; i++) {
  845       uc = u[j][i].u;
  846       vc = u[j][i].v;
  847 
  848       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
  849       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
  850 
  851       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
  852       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
  853        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
  854 
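            /*
              The entries below follow from differentiating the implicit residual
              F_u = du/dt - D1*(u_xx + u_yy) + u*v*v - gamma*(1 - u): the shift a
              accounts for du/dt, the Laplacian contributes -D1*sx and -D1*sy off
              the diagonal and 2*D1*(sx+sy) on it, and the reaction terms contribute
              v*v + gamma on the diagonal and dF_u/dv = 2*u*v in the coupling entry.
            */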
  855       stencil[0].i = i; stencil[0].c = 0; entries[0] = -appctx->D1*sy;
  856       stencil[1].i = i; stencil[1].c = 0; entries[1] = -appctx->D1*sy;
  857       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = -appctx->D1*sx;
  858       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = -appctx->D1*sx;
  859       stencil[4].i = i; stencil[4].c = 0; entries[4] = 2.0*appctx->D1*(sx + sy) + vc*vc + appctx->gamma + a;
  860       stencil[5].i = i; stencil[5].c = 1; entries[5] = 2.0*uc*vc;
  861       rowstencil.i = i; rowstencil.c = 0;
  862 
  863       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
  864       if (appctx->aijpc) {
  865         ierr = MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
  866       }
  867       stencil[0].c = 1; entries[0] = -appctx->D2*sy;
  868       stencil[1].c = 1; entries[1] = -appctx->D2*sy;
  869       stencil[2].c = 1; entries[2] = -appctx->D2*sx;
  870       stencil[3].c = 1; entries[3] = -appctx->D2*sx;
  871       stencil[4].c = 1; entries[4] = 2.0*appctx->D2*(sx + sy) - 2.0*uc*vc + appctx->gamma + appctx->kappa + a;
  872       stencil[5].c = 0; entries[5] = -vc*vc;
            rowstencil.c = 1;
  873 
  874       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
  875       if (appctx->aijpc) {
  876         ierr = MatSetValuesStencil(B,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
  877       }
  878       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
  879     }
  880   }
  881 
  882   /*
  883      Restore vectors
  884   */
  885   ierr = PetscLogFlops(19*xm*ym);CHKERRQ(ierr);
  886   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
  887   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
  888   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  889   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  890   ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  891   if (appctx->aijpc) {
  892     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  893     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  894     ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  895   }
  896   PetscFunctionReturn(0);
  897 }
  898 
  899 PetscErrorCode RHSJacobianByHand(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
  900 {
  901   AppCtx         *appctx = (AppCtx*)ctx;     /* user-defined application context */
  902   DM             da;
  903   PetscErrorCode ierr;
  904   PetscInt       i,j,Mx,My,xs,ys,xm,ym;
  905   PetscReal      hx,hy,sx,sy;
  906   PetscScalar    uc,vc;
  907   Field          **u;
  908   Vec            localU;
  909   MatStencil     stencil[6],rowstencil;
  910   PetscScalar    entries[6];
  911 
  912   PetscFunctionBegin;
  913   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  914   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
  915   ierr = DMDAGetInfo(da,PETSC_IGNORE,&Mx,&My,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  916 
  917   hx = 2.50/(PetscReal)(Mx); sx = 1.0/(hx*hx);
  918   hy = 2.50/(PetscReal)(My); sy = 1.0/(hy*hy);
  919 
  920   /*
  921      Scatter ghost points to local vector, using the 2-step process
  922         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
  923      By placing code between these two statements, computations can be
  924      done while messages are in transition.
  925   */
  926   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  927   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
  928 
  929   /*
  930      Get pointers to vector data
  931   */
  932   ierr = DMDAVecGetArrayRead(da,localU,&u);CHKERRQ(ierr);
  933 
  934   /*
  935      Get local grid boundaries
  936   */
  937   ierr = DMDAGetCorners(da,&xs,&ys,NULL,&xm,&ym,NULL);CHKERRQ(ierr);
  938 
  939   stencil[0].k = 0;
  940   stencil[1].k = 0;
  941   stencil[2].k = 0;
  942   stencil[3].k = 0;
  943   stencil[4].k = 0;
  944   stencil[5].k = 0;
  945   rowstencil.k = 0;
  946 
  947   /*
  948      Compute function over the locally owned part of the grid
  949   */
  950   for (j=ys; j<ys+ym; j++) {
  951 
  952     stencil[0].j = j-1;
  953     stencil[1].j = j+1;
  954     stencil[2].j = j;
  955     stencil[3].j = j;
  956     stencil[4].j = j;
  957     stencil[5].j = j;
  958     rowstencil.k = 0; rowstencil.j = j;
  959     for (i=xs; i<xs+xm; i++) {
  960       uc = u[j][i].u;
  961       vc = u[j][i].v;
  962 
  963       /*      uxx       = (-2.0*uc + u[j][i-1].u + u[j][i+1].u)*sx;
  964       uyy       = (-2.0*uc + u[j-1][i].u + u[j+1][i].u)*sy;
  965 
  966       vxx       = (-2.0*vc + u[j][i-1].v + u[j][i+1].v)*sx;
  967       vyy       = (-2.0*vc + u[j-1][i].v + u[j+1][i].v)*sy;
  968        f[j][i].u = appctx->D1*(uxx + uyy) - uc*vc*vc + appctx->gamma*(1.0 - uc);*/
  969 
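            /*
              These entries are the derivatives of the explicit right-hand side
              f[j][i].u and f[j][i].v; they are the negatives of the corresponding
              IJacobianByHand entries, without the shift a.
            */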
  970       stencil[0].i = i; stencil[0].c = 0; entries[0] = appctx->D1*sy;
  971       stencil[1].i = i; stencil[1].c = 0; entries[1] = appctx->D1*sy;
  972       stencil[2].i = i-1; stencil[2].c = 0; entries[2] = appctx->D1*sx;
  973       stencil[3].i = i+1; stencil[3].c = 0; entries[3] = appctx->D1*sx;
  974       stencil[4].i = i; stencil[4].c = 0; entries[4] = -2.0*appctx->D1*(sx + sy) - vc*vc - appctx->gamma;
  975       stencil[5].i = i; stencil[5].c = 1; entries[5] = -2.0*uc*vc;
  976       rowstencil.i = i; rowstencil.c = 0;
  977 
  978       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
  979 
  980       stencil[0].c = 1; entries[0] = appctx->D2*sy;
  981       stencil[1].c = 1; entries[1] = appctx->D2*sy;
  982       stencil[2].c = 1; entries[2] = appctx->D2*sx;
  983       stencil[3].c = 1; entries[3] = appctx->D2*sx;
  984       stencil[4].c = 1; entries[4] = -2.0*appctx->D2*(sx + sy) + 2.0*uc*vc - appctx->gamma - appctx->kappa;
  985       stencil[5].c = 0; entries[5] = vc*vc;
  986       rowstencil.c = 1;
  987 
  988       ierr = MatSetValuesStencil(A,1,&rowstencil,6,stencil,entries,INSERT_VALUES);CHKERRQ(ierr);
  989       /* f[j][i].v = appctx->D2*(vxx + vyy) + uc*vc*vc - (appctx->gamma + appctx->kappa)*vc; */
  990     }
  991   }
  992 
  993   /*
  994      Restore vectors
  995   */
  996   ierr = PetscLogFlops(19*xm*ym);CHKERRQ(ierr);
  997   ierr = DMDAVecRestoreArrayRead(da,localU,&u);CHKERRQ(ierr);
  998   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
  999   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 1000   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 1001   ierr = MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
 1002   if (appctx->aijpc) {
 1003     ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 1004     ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
 1005     ierr = MatSetOption(B,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
 1006   }
 1007   PetscFunctionReturn(0);
 1008 }
 1009 
 1010 PetscErrorCode RHSJacobianAdolc(TS ts,PetscReal t,Vec U,Mat A,Mat B,void *ctx)
 1011 {
 1012   AppCtx         *appctx = (AppCtx*)ctx;
 1013   DM             da;
 1014   PetscErrorCode ierr;
 1015   PetscScalar    *u_vec;
 1016   Vec            localU;
 1017 
 1018   PetscFunctionBegin;
 1019   ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
 1020   ierr = DMGetLocalVector(da,&localU);CHKERRQ(ierr);
 1021 
 1022   /*
 1023      Scatter ghost points to local vector, using the 2-step process
 1024         DMGlobalToLocalBegin(),DMGlobalToLocalEnd().
 1025      By placing code between these two statements, computations can be
 1026      done while messages are in transition.
 1027   */
 1028   ierr = DMGlobalToLocalBegin(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
 1029   ierr = DMGlobalToLocalEnd(da,U,INSERT_VALUES,localU);CHKERRQ(ierr);
 1030 
 1031   /* Get pointers to vector data */
 1032   ierr = VecGetArray(localU,&u_vec);CHKERRQ(ierr);
 1033 
 1034   /*
 1035     Compute Jacobian
 1036   */
 1037   ierr = PetscAdolcComputeRHSJacobianLocal(1,A,u_vec,appctx->adctx);CHKERRQ(ierr);
 1038 
 1039   /*
 1040      Restore vectors
 1041   */
 1042   ierr = VecRestoreArray(localU,&u_vec);CHKERRQ(ierr);
 1043   ierr = DMRestoreLocalVector(da,&localU);CHKERRQ(ierr);
 1044   PetscFunctionReturn(0);
 1045 }
 1046 
 1047 /*TEST
 1048 
 1049   build:
 1050     requires: double !complex adolc colpack
 1051 
 1052   test:
 1053     suffix: 1
 1054     nsize: 1
 1055     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian
 1056     output_file: output/adr_ex5adj_1.out
 1057 
 1058   test:
 1059     suffix: 2
 1060     nsize: 1
 1061     args: -ts_max_steps 1 -da_grid_x 12 -da_grid_y 12 -snes_test_jacobian -implicitform
 1062     output_file: output/adr_ex5adj_2.out
 1063 
 1064   test:
 1065     suffix: 3
 1066     nsize: 4
 1067     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor
 1068     output_file: output/adr_ex5adj_3.out
 1069 
 1070   test:
 1071     suffix: 4
 1072     nsize: 4
 1073     args: -ts_max_steps 10 -da_grid_x 12 -da_grid_y 12 -ts_monitor -ts_adjoint_monitor -implicitform
 1074     output_file: output/adr_ex5adj_4.out
 1075 
 1076   testset:
 1077     suffix: 5
 1078     nsize: 4
 1079     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse
 1080     output_file: output/adr_ex5adj_5.out
 1081 
 1082   testset:
 1083     suffix: 6
 1084     nsize: 4
 1085     args: -ts_max_steps 10 -da_grid_x 15 -da_grid_y 15 -ts_monitor -ts_adjoint_monitor -adolc_sparse -implicitform
 1086     output_file: output/adr_ex5adj_6.out
 1087 
 1088 TEST*/