/* Actual source code: cg_fr.c */

  1: /*$Id: cg_fr.c 1.22 05/05/10 15:21:38-05:00 sarich@zorak.(none) $*/

  3: #include "cg.h"                           /*I "tao_solver.h" I*/

  7: static int TaoSolve_CG_FR(TAO_SOLVER tao, void *solver)
  8: {
  9:   TAO_CG *cg = (TAO_CG *) solver;
 10:   TaoVec  *xx,*gg; /* solution vector,  gradient vector */
 11:   TaoVec*  dx=cg->DX, *ww=cg->Work;
 12:   int    iter=0,lsflag=0,info;
 13:   double gnorm2Prev,gdx,f,gnorm,gnorm2,step=0;
 14:   TaoTerminateReason reason;

 16:   TaoFunctionBegin;
 17:   info=TaoGetSolution(tao,&xx);CHKERRQ(info);
 18:   info=TaoGetGradient(tao,&gg);CHKERRQ(info);

 20:   info = TaoComputeMeritFunctionGradient(tao,xx,&f,gg);CHKERRQ(info);
 21:   info = gg->Norm2(&gnorm);  CHKERRQ(info);

 23:   info = dx->SetToZero(); CHKERRQ(info);

 25:   cg->beta=0; cg->restarts=0;
 26:   gnorm2 = gnorm*gnorm; gnorm2Prev = gnorm2;

 28:   /* Enter loop */
 29:   while (1){

 31:     /* Test for convergence */
 32:     info = TaoMonitor(tao,iter++,f,gnorm,0.0,step,&reason);CHKERRQ(info);
 33:     if (reason!=TAO_CONTINUE_ITERATING) break;

 35:     cg->beta=(gnorm2)/(gnorm2Prev);
 36:     info = dx->Axpby(-1.0,gg,cg->beta); CHKERRQ(info);
 37: 
 38:     info = dx->Dot(gg,&gdx); CHKERRQ(info);
 39:     if (gdx>=0){ /* If not a descent direction, use gradient */
 40:       info = dx->ScaleCopyFrom(-1.0,gg); CHKERRQ(info);
 41:       gdx=-gnorm2;
 42:       cg->restarts++; cg->beta=0.0;
 43:       info = PetscLogInfo((tao,"TaoCG: Not Descent Direction: Restart CG at iterate %d with gradient direction.\n",iter)); CHKERRQ(info);
 44:     }

 46:     /* Line Search */
 47:     gnorm2Prev = gnorm2;  step=1.0;
 48:     info = TaoLineSearchApply(tao,xx,gg,dx,ww,&f,&step,&lsflag);CHKERRQ(info);
 49:     info = gg->Norm2(&gnorm);CHKERRQ(info);
 50:     gnorm2 = gnorm*gnorm;

 52:   }
 53: 
 54:   TaoFunctionReturn(0);
 55: }


 58: /*------------------------------------------------------------*/
 61: static int TaoSetOptions_CG_FR(TAO_SOLVER tao, void*solver)
 62: {
 63:   int      info;

 65:   TaoFunctionBegin;

 67:   info = TaoOptionsHead("Nonlinear Conjugate Gradient method for unconstrained optimization");CHKERRQ(info);
 68:   info = TaoLineSearchSetFromOptions(tao);CHKERRQ(info);
 69:   info = TaoOptionsTail();CHKERRQ(info);

 71:   TaoFunctionReturn(0);
 72: }


 75: /* ---------------------------------------------------------- */
 76: /*
 77:     TAOCreate_CGFR - Creates the data structure for the nonlinear CG method
 78:     and sets the function pointers for all the routines it needs to call
 79:     (TAOSolve_CG() etc.)

 82: */
 86: int TaoCreate_CG_FR(TAO_SOLVER tao)
 87: {
 88:   TAO_CG *cg;
 89:   int    info;

 91:   TaoFunctionBegin;

 93:   info = TaoNew(TAO_CG,&cg); CHKERRQ(info);
 94:   info = PetscLogObjectMemory(tao,sizeof(TAO_CG)); CHKERRQ(info);

 96:   info=TaoSetTaoSolveRoutine(tao,TaoSolve_CG_FR,(void*)cg); CHKERRQ(info);
 97:   info=TaoSetTaoSetUpDownRoutines(tao,TaoSetUp_CG,TaoDestroy_CG); CHKERRQ(info);
 98:   info=TaoSetTaoOptionsRoutine(tao,TaoSetOptions_CG_FR); CHKERRQ(info);
 99:   info=TaoSetTaoViewRoutine(tao,TaoView_CG); CHKERRQ(info);

101:   info = TaoSetMaximumIterates(tao,2000); CHKERRQ(info);
102:   info = TaoSetTolerances(tao,1e-4,1e-4,0,0); CHKERRQ(info);
103:   info = TaoSetMaximumFunctionEvaluations(tao,4000); CHKERRQ(info);

105:   info = TaoCreateMoreThuenteLineSearch(tao,0,0.1); CHKERRQ(info);

107:   cg->eta = 1000.0;
108: 
109:   TaoFunctionReturn(0);
110: }