ROL_PrimalDualActiveSetStep.hpp
1 // @HEADER
2 // ************************************************************************
3 //
4 // Rapid Optimization Library (ROL) Package
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact lead developers:
38 // Drew Kouri (dpkouri@sandia.gov) and
39 // Denis Ridzal (dridzal@sandia.gov)
40 //
41 // ************************************************************************
42 // @HEADER
43 
44 #ifndef ROL_PRIMALDUALACTIVESETSTEP_H
45 #define ROL_PRIMALDUALACTIVESETSTEP_H
46 
47 #include "ROL_Step.hpp"
48 #include "ROL_Vector.hpp"
49 #include "ROL_Krylov.hpp"
50 #include "ROL_Objective.hpp"
51 #include "ROL_BoundConstraint.hpp"
52 #include "ROL_Types.hpp"
53 #include "ROL_Secant.hpp"
54 #include "Teuchos_ParameterList.hpp"
55 
130 namespace ROL {
131 
132 template <class Real>
133 class PrimalDualActiveSetStep : public Step<Real> {
134 private:
135 
136  Teuchos::RCP<Krylov<Real> > krylov_;
137 
138  // Krylov Parameters
139  int iterCR_;
140  int flagCR_;
141  Real itol_;
142 
143  // PDAS Parameters
144  int maxit_;
145  int iter_;
146  int flag_;
147  Real stol_;
148  Real gtol_;
149  Real scale_;
150  Real neps_;
151  bool feasible_;
152 
153  // Dual Variable
154  Teuchos::RCP<Vector<Real> > lambda_;
155  Teuchos::RCP<Vector<Real> > xlam_;
156  Teuchos::RCP<Vector<Real> > x0_;
157  Teuchos::RCP<Vector<Real> > xbnd_;
158  Teuchos::RCP<Vector<Real> > As_;
159  Teuchos::RCP<Vector<Real> > xtmp_;
160  Teuchos::RCP<Vector<Real> > res_;
161  Teuchos::RCP<Vector<Real> > Ag_;
162  Teuchos::RCP<Vector<Real> > rtmp_;
163  Teuchos::RCP<Vector<Real> > gtmp_;
164 
165  // Secant Information
166  ESecant esec_;
167  Teuchos::RCP<Secant<Real> > secant_;
168  bool useSecantPrecond_;
169  bool useSecantHessVec_;
170 
171  class HessianPD : public LinearOperator<Real> {
172  private:
173  const Teuchos::RCP<Objective<Real> > obj_;
174  const Teuchos::RCP<BoundConstraint<Real> > bnd_;
175  const Teuchos::RCP<Vector<Real> > x_;
176  const Teuchos::RCP<Vector<Real> > xlam_;
177  Teuchos::RCP<Vector<Real> > v_;
178  Real eps_;
179  const Teuchos::RCP<Secant<Real> > secant_;
180  bool useSecant_;
181  public:
182  HessianPD(const Teuchos::RCP<Objective<Real> > &obj,
183  const Teuchos::RCP<BoundConstraint<Real> > &bnd,
184  const Teuchos::RCP<Vector<Real> > &x,
185  const Teuchos::RCP<Vector<Real> > &xlam,
186  const Real eps = 0,
187  const Teuchos::RCP<Secant<Real> > &secant = Teuchos::null,
188  const bool useSecant = false )
189  : obj_(obj), bnd_(bnd), x_(x), xlam_(xlam),
190  eps_(eps), secant_(secant), useSecant_(useSecant) {
191  v_ = x_->clone();
192  if ( !useSecant || secant == Teuchos::null ) {
193  useSecant_ = false;
194  }
195  }
196  void apply( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
197  v_->set(v);
198  bnd_->pruneActive(*v_,*xlam_,eps_);
199  if ( useSecant_ ) {
200  secant_->applyB(Hv,*v_);
201  }
202  else {
203  obj_->hessVec(Hv,*v_,*x_,tol);
204  }
205  bnd_->pruneActive(Hv,*xlam_,eps_);
206  }
207  };
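  // In apply(), both the input direction and the output are pruned on the
  // eps-active set estimated from xlam_, so HessianPD acts as the Hessian
  // (or, if requested, the secant approximation B) restricted to the
  // inactive components; active components of Hv are zero by construction.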
208 
209  class PrecondPD : public LinearOperator<Real> {
210  private:
211  const Teuchos::RCP<Objective<Real> > obj_;
212  const Teuchos::RCP<BoundConstraint<Real> > bnd_;
213  const Teuchos::RCP<Vector<Real> > x_;
214  const Teuchos::RCP<Vector<Real> > xlam_;
215  Teuchos::RCP<Vector<Real> > v_;
216  Real eps_;
217  const Teuchos::RCP<Secant<Real> > secant_;
218  bool useSecant_;
219  public:
220  PrecondPD(const Teuchos::RCP<Objective<Real> > &obj,
221  const Teuchos::RCP<BoundConstraint<Real> > &bnd,
222  const Teuchos::RCP<Vector<Real> > &x,
223  const Teuchos::RCP<Vector<Real> > &xlam,
224  const Real eps = 0,
225  const Teuchos::RCP<Secant<Real> > &secant = Teuchos::null,
226  const bool useSecant = false )
227  : obj_(obj), bnd_(bnd), x_(x), xlam_(xlam),
228  eps_(eps), secant_(secant), useSecant_(useSecant) {
229  v_ = x_->clone();
230  if ( !useSecant || secant == Teuchos::null ) {
231  useSecant_ = false;
232  }
233  }
234  void apply( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
235  Hv.set(v.dual());
236  }
237  void applyInverse( Vector<Real> &Hv, const Vector<Real> &v, Real &tol ) const {
238  v_->set(v);
239  bnd_->pruneActive(*v_,*xlam_,eps_);
240  if ( useSecant_ ) {
241  secant_->applyH(Hv,*v_);
242  }
243  else {
244  obj_->precond(Hv,*v_,*x_,tol);
245  }
246  bnd_->pruneActive(Hv,*xlam_,eps_);
247  }
248  };
249 
262  Real computeCriticalityMeasure(Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con, Real tol) {
263  Real one(1);
264  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
265  obj.gradient(*(step_state->gradientVec),x,tol);
266  xtmp_->set(x);
267  xtmp_->axpy(-one,(step_state->gradientVec)->dual());
268  con.project(*xtmp_);
269  xtmp_->axpy(-one,x);
270  return xtmp_->norm();
271  }
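  // The value returned above is the projected-gradient criticality measure
  // || x - P_[l,u]( x - grad f(x) ) ||, which vanishes exactly at stationary
  // points of the bound-constrained problem.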
272 
273 public:
280  PrimalDualActiveSetStep( Teuchos::ParameterList &parlist )
281  : Step<Real>::Step(), krylov_(Teuchos::null),
282  iterCR_(0), flagCR_(0), itol_(0),
283  maxit_(0), iter_(0), flag_(0), stol_(0), gtol_(0), scale_(0),
284  neps_(-ROL_EPSILON<Real>()), feasible_(false),
285  lambda_(Teuchos::null), xlam_(Teuchos::null), x0_(Teuchos::null),
286  xbnd_(Teuchos::null), As_(Teuchos::null), xtmp_(Teuchos::null),
287  res_(Teuchos::null), Ag_(Teuchos::null), rtmp_(Teuchos::null),
288  gtmp_(Teuchos::null),
289  esec_(SECANT_LBFGS), secant_(Teuchos::null), useSecantPrecond_(false),
290  useSecantHessVec_(false) {
291  Real one(1), oem6(1.e-6), oem8(1.e-8);
292  // Algorithmic parameters
293  maxit_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Iteration Limit",10);
294  stol_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Step Tolerance",oem8);
295  gtol_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Relative Gradient Tolerance",oem6);
296  scale_ = parlist.sublist("Step").sublist("Primal Dual Active Set").get("Dual Scaling", one);
297  // Build secant object
298  esec_ = StringToESecant(parlist.sublist("General").sublist("Secant").get("Type","Limited-Memory BFGS"));
299  useSecantHessVec_ = parlist.sublist("General").sublist("Secant").get("Use as Hessian", false);
300  useSecantPrecond_ = parlist.sublist("General").sublist("Secant").get("Use as Preconditioner", false);
301  if ( useSecantHessVec_ || useSecantPrecond_ ) {
302  secant_ = SecantFactory<Real>(parlist);
303  }
304  // Build Krylov object
305  krylov_ = KrylovFactory<Real>(parlist);
306  }
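  // Illustrative sketch (not part of the original file): the sublist and key
  // names below are exactly those queried in the constructor above; the
  // numerical values are only examples.
  //
  //   Teuchos::ParameterList parlist;
  //   Teuchos::ParameterList &pdas
  //     = parlist.sublist("Step").sublist("Primal Dual Active Set");
  //   pdas.set("Iteration Limit",             10);
  //   pdas.set("Relative Step Tolerance",     1.e-8);
  //   pdas.set("Relative Gradient Tolerance", 1.e-6);
  //   pdas.set("Dual Scaling",                1.0);
  //   parlist.sublist("General").sublist("Secant").set("Type","Limited-Memory BFGS");
  //   parlist.sublist("General").sublist("Secant").set("Use as Hessian", false);
  //   PrimalDualActiveSetStep<double> step(parlist);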
307 
319  void initialize( Vector<Real> &x, const Vector<Real> &s, const Vector<Real> &g,
320  Objective<Real> &obj, BoundConstraint<Real> &con,
321  AlgorithmState<Real> &algo_state ) {
322  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
323  Real zero(0), one(1);
324  // Initialize state descent direction and gradient storage
325  step_state->descentVec = s.clone();
326  step_state->gradientVec = g.clone();
327  step_state->searchSize = zero;
328  // Initialize additional storage
329  xlam_ = x.clone();
330  x0_ = x.clone();
331  xbnd_ = x.clone();
332  As_ = s.clone();
333  xtmp_ = x.clone();
334  res_ = g.clone();
335  Ag_ = g.clone();
336  rtmp_ = g.clone();
337  gtmp_ = g.clone();
338  // Project x onto constraint set
339  con.project(x);
340  // Update objective function, get value, and get gradient
341  Real tol = std::sqrt(ROL_EPSILON<Real>());
342  obj.update(x,true,algo_state.iter);
343  algo_state.value = obj.value(x,tol);
344  algo_state.nfval++;
345  algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
346  algo_state.ngrad++;
347  // Initialize dual variable
348  lambda_ = s.clone();
349  lambda_->set((step_state->gradientVec)->dual());
350  lambda_->scale(-one);
351  //con.setVectorToLowerBound(*lambda_);
352  }
353 
379  void compute( Vector<Real> &s, const Vector<Real> &x, Objective<Real> &obj, BoundConstraint<Real> &con,
380  AlgorithmState<Real> &algo_state ) {
381  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
382  Real zero(0), one(1);
383  s.zero();
384  x0_->set(x);
385  res_->set(*(step_state->gradientVec));
386  for ( iter_ = 0; iter_ < maxit_; iter_++ ) {
387  /********************************************************************/
388  // MODIFY ITERATE VECTOR TO CHECK ACTIVE SET
389  /********************************************************************/
390  xlam_->set(*x0_); // xlam = x0
391  xlam_->axpy(scale_,*(lambda_)); // xlam = x0 + c*lambda
392  /********************************************************************/
393  // PROJECT x ONTO PRIMAL DUAL FEASIBLE SET
394  /********************************************************************/
395  As_->zero(); // As = 0
396 
397  con.setVectorToUpperBound(*xbnd_); // xbnd = u
398  xbnd_->axpy(-one,x); // xbnd = u - x
399  xtmp_->set(*xbnd_); // tmp = u - x
400  con.pruneUpperActive(*xtmp_,*xlam_,neps_); // tmp = I(u - x)
401  xbnd_->axpy(-one,*xtmp_); // xbnd = A(u - x)
402  As_->plus(*xbnd_); // As += A(u - x)
403 
404  con.setVectorToLowerBound(*xbnd_); // xbnd = l
405  xbnd_->axpy(-one,x); // xbnd = l - x
406  xtmp_->set(*xbnd_); // tmp = l - x
407  con.pruneLowerActive(*xtmp_,*xlam_,neps_); // tmp = I(l - x)
408  xbnd_->axpy(-one,*xtmp_); // xbnd = A(l - x)
409  As_->plus(*xbnd_); // As += A(l - x)
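  // At this point As_ carries the active-set portion of the step: (u - x) on
  // the upper eps-active components and (l - x) on the lower eps-active
  // components of xlam_, so x + As_ lands exactly on the violated bounds.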
410  /********************************************************************/
411  // APPLY HESSIAN TO ACTIVE COMPONENTS OF s AND REMOVE INACTIVE
412  /********************************************************************/
413  itol_ = std::sqrt(ROL_EPSILON<Real>());
414  if ( useSecantHessVec_ && secant_ != Teuchos::null ) { // IHAs = H*As
415  secant_->applyB(*gtmp_,*As_);
416  }
417  else {
418  obj.hessVec(*gtmp_,*As_,x,itol_);
419  }
420  con.pruneActive(*gtmp_,*xlam_,neps_); // IHAs = I(H*As)
421  /********************************************************************/
422  // SEPARATE ACTIVE AND INACTIVE COMPONENTS OF THE GRADIENT
423  /********************************************************************/
424  rtmp_->set(*(step_state->gradientVec)); // Inactive components
425  con.pruneActive(*rtmp_,*xlam_,neps_);
426 
427  Ag_->set(*(step_state->gradientVec)); // Active components
428  Ag_->axpy(-one,*rtmp_);
429  /********************************************************************/
430  // SOLVE REDUCED NEWTON SYSTEM
431  /********************************************************************/
432  rtmp_->plus(*gtmp_);
433  rtmp_->scale(-one); // rhs = -Ig - I(H*As)
434  s.zero();
435  if ( rtmp_->norm() > zero ) {
436  // Initialize Hessian and preconditioner
437  Teuchos::RCP<Objective<Real> > obj_ptr = Teuchos::rcpFromRef(obj);
438  Teuchos::RCP<BoundConstraint<Real> > con_ptr = Teuchos::rcpFromRef(con);
439  Teuchos::RCP<LinearOperator<Real> > hessian
440  = Teuchos::rcp(new HessianPD(obj_ptr,con_ptr,
441  algo_state.iterateVec,xlam_,neps_,secant_,useSecantHessVec_));
442  Teuchos::RCP<LinearOperator<Real> > precond
443  = Teuchos::rcp(new PrecondPD(obj_ptr,con_ptr,
444  algo_state.iterateVec,xlam_,neps_,secant_,useSecantPrecond_));
445  //solve(s,*rtmp_,*xlam_,x,obj,con); // Call conjugate residuals
446  krylov_->run(s,*hessian,*rtmp_,*precond,iterCR_,flagCR_);
447  con.pruneActive(s,*xlam_,neps_); // s <- Is
448  }
449  s.plus(*As_); // s = Is + As
450  /********************************************************************/
451  // UPDATE MULTIPLIER
452  /********************************************************************/
453  if ( useSecantHessVec_ && secant_ != Teuchos::null ) {
454  secant_->applyB(*rtmp_,s);
455  }
456  else {
457  obj.hessVec(*rtmp_,s,x,itol_);
458  }
459  gtmp_->set(*rtmp_);
460  con.pruneActive(*gtmp_,*xlam_,neps_);
461  lambda_->set(*rtmp_);
462  lambda_->axpy(-one,*gtmp_);
463  lambda_->plus(*Ag_);
464  lambda_->scale(-one);
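  // The multiplier update above forms lambda = -(g + H*s) on the active set:
  // rtmp_ = H*s, gtmp_ = inactive part of H*s, so rtmp_ - gtmp_ is the active
  // part of H*s, to which the active gradient Ag_ is added before negation.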
465  /********************************************************************/
466  // UPDATE STEP
467  /********************************************************************/
468  x0_->set(x);
469  x0_->plus(s);
470  res_->set(*(step_state->gradientVec));
471  res_->plus(*rtmp_);
472  // Compute criticality measure
473  xtmp_->set(*x0_);
474  xtmp_->axpy(-one,res_->dual());
475  con.project(*xtmp_);
476  xtmp_->axpy(-one,*x0_);
477 // std::cout << s.norm() << " "
478 // << tmp->norm() << " "
479 // << res_->norm() << " "
480 // << lambda_->norm() << " "
481 // << flagCR_ << " "
482 // << iterCR_ << "\n";
483  if ( xtmp_->norm() < gtol_*algo_state.gnorm ) {
484  flag_ = 0;
485  break;
486  }
487  if ( s.norm() < stol_*x.norm() ) {
488  flag_ = 2;
489  break;
490  }
491  }
492  if ( iter_ == maxit_ ) {
493  flag_ = 1;
494  }
495  else {
496  iter_++;
497  }
498  }
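  // Each pass of the loop above fixes the active components of the step to
  // As_ and solves the reduced (inactive) Newton system
  //   H_{II} s_I = -( g_I + H_{IA} s_A )
  // with the Krylov solver, then recovers the multiplier from the active-set
  // optimality conditions; see also the commented-out documentation at the
  // end of this file.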
499 
511  void update( Vector<Real> &x, const Vector<Real> &s, Objective<Real> &obj, BoundConstraint<Real> &con,
512  AlgorithmState<Real> &algo_state ) {
513  Teuchos::RCP<StepState<Real> > step_state = Step<Real>::getState();
514 
515  x.plus(s);
516  feasible_ = con.isFeasible(x);
517  algo_state.snorm = s.norm();
518  algo_state.iter++;
519  Real tol = std::sqrt(ROL_EPSILON<Real>());
520  obj.update(x,true,algo_state.iter);
521  algo_state.value = obj.value(x,tol);
522  algo_state.nfval++;
523 
524  if ( secant_ != Teuchos::null ) {
525  gtmp_->set(*(step_state->gradientVec));
526  }
527  algo_state.gnorm = computeCriticalityMeasure(x,obj,con,tol);
528  algo_state.ngrad++;
529 
530  if ( secant_ != Teuchos::null ) {
531  secant_->updateStorage(x,*(step_state->gradientVec),*gtmp_,s,algo_state.snorm,algo_state.iter+1);
532  }
533  (algo_state.iterateVec)->set(x);
534  }
535 
541  std::string printHeader( void ) const {
542  std::stringstream hist;
543  hist << " ";
544  hist << std::setw(6) << std::left << "iter";
545  hist << std::setw(15) << std::left << "value";
546  hist << std::setw(15) << std::left << "gnorm";
547  hist << std::setw(15) << std::left << "snorm";
548  hist << std::setw(10) << std::left << "#fval";
549  hist << std::setw(10) << std::left << "#grad";
550  if ( maxit_ > 1 ) {
551  hist << std::setw(10) << std::left << "iterPDAS";
552  hist << std::setw(10) << std::left << "flagPDAS";
553  }
554  else {
555  hist << std::setw(10) << std::left << "iterCR";
556  hist << std::setw(10) << std::left << "flagCR";
557  }
558  hist << std::setw(10) << std::left << "feasible";
559  hist << "\n";
560  return hist.str();
561  }
562 
568  std::string printName( void ) const {
569  std::stringstream hist;
570  hist << "\nPrimal Dual Active Set Newton's Method\n";
571  return hist.str();
572  }
573 
581  virtual std::string print( AlgorithmState<Real> &algo_state, bool print_header = false ) const {
582  std::stringstream hist;
583  hist << std::scientific << std::setprecision(6);
584  if ( algo_state.iter == 0 ) {
585  hist << printName();
586  }
587  if ( print_header ) {
588  hist << printHeader();
589  }
590  if ( algo_state.iter == 0 ) {
591  hist << " ";
592  hist << std::setw(6) << std::left << algo_state.iter;
593  hist << std::setw(15) << std::left << algo_state.value;
594  hist << std::setw(15) << std::left << algo_state.gnorm;
595  hist << "\n";
596  }
597  else {
598  hist << " ";
599  hist << std::setw(6) << std::left << algo_state.iter;
600  hist << std::setw(15) << std::left << algo_state.value;
601  hist << std::setw(15) << std::left << algo_state.gnorm;
602  hist << std::setw(15) << std::left << algo_state.snorm;
603  hist << std::setw(10) << std::left << algo_state.nfval;
604  hist << std::setw(10) << std::left << algo_state.ngrad;
605  if ( maxit_ > 1 ) {
606  hist << std::setw(10) << std::left << iter_;
607  hist << std::setw(10) << std::left << flag_;
608  }
609  else {
610  hist << std::setw(10) << std::left << iterCR_;
611  hist << std::setw(10) << std::left << flagCR_;
612  }
613  if ( feasible_ ) {
614  hist << std::setw(10) << std::left << "YES";
615  }
616  else {
617  hist << std::setw(10) << std::left << "NO";
618  }
619  hist << "\n";
620  }
621  return hist.str();
622  }
623 
624 }; // class PrimalDualActiveSetStep
625 
626 } // namespace ROL
627 
628 #endif
629 
630 // void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
631 // Objective<Real> &obj, BoundConstraint<Real> &con) {
632 // Real rnorm = rhs.norm();
633 // Real rtol = std::min(tol1_,tol2_*rnorm);
634 // itol_ = std::sqrt(ROL_EPSILON<Real>());
635 // sol.zero();
636 //
637 // Teuchos::RCP<Vector<Real> > res = rhs.clone();
638 // res->set(rhs);
639 //
640 // Teuchos::RCP<Vector<Real> > v = x.clone();
641 // con.pruneActive(*res,xlam,neps_);
642 // obj.precond(*v,*res,x,itol_);
643 // con.pruneActive(*v,xlam,neps_);
644 //
645 // Teuchos::RCP<Vector<Real> > p = x.clone();
646 // p->set(*v);
647 //
648 // Teuchos::RCP<Vector<Real> > Hp = x.clone();
649 //
650 // iterCR_ = 0;
651 // flagCR_ = 0;
652 //
653 // Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rv = v->dot(*res);
654 //
655 // for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
656 // if ( false ) {
657 // itol_ = rtol/(maxitCR_*rnorm);
658 // }
659 // con.pruneActive(*p,xlam,neps_);
660 // if ( secant_ == Teuchos::null ) {
661 // obj.hessVec(*Hp, *p, x, itol_);
662 // }
663 // else {
664 // secant_->applyB( *Hp, *p, x );
665 // }
666 // con.pruneActive(*Hp,xlam,neps_);
667 //
668 // kappa = p->dot(*Hp);
669 // if ( kappa <= 0.0 ) { flagCR_ = 2; break; }
670 // alpha = rv/kappa;
671 // sol.axpy(alpha,*p);
672 //
673 // res->axpy(-alpha,*Hp);
674 // rnorm = res->norm();
675 // if ( rnorm < rtol ) { break; }
676 //
677 // con.pruneActive(*res,xlam,neps_);
678 // obj.precond(*v,*res,x,itol_);
679 // con.pruneActive(*v,xlam,neps_);
680 // tmp = rv;
681 // rv = v->dot(*res);
682 // beta = rv/tmp;
683 //
684 // p->scale(beta);
685 // p->axpy(1.0,*v);
686 // }
687 // if ( iterCR_ == maxitCR_ ) {
688 // flagCR_ = 1;
689 // }
690 // else {
691 // iterCR_++;
692 // }
693 // }
694 
695 
696 // /** \brief Apply the inactive components of the Hessian operator.
697 //
698 // I.e., the components corresponding to \f$\mathcal{I}_k\f$.
699 //
700 // @param[out] hv is the result of applying the Hessian at @b x to
701 // @b v
702 // @param[in] v is the direction in which we apply the Hessian
703 // @param[in] x is the current iteration vector \f$x_k\f$
704 // @param[in] xlam is the vector \f$x_k + c\lambda_k\f$
705 // @param[in] obj is the objective function
706 // @param[in] con are the bound constraints
707 // */
708 // void applyInactiveHessian(Vector<Real> &hv, const Vector<Real> &v, const Vector<Real> &x,
709 // const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
710 // Teuchos::RCP<Vector<Real> > tmp = v.clone();
711 // tmp->set(v);
712 // con.pruneActive(*tmp,xlam,neps_);
713 // if ( secant_ == Teuchos::null ) {
714 // obj.hessVec(hv,*tmp,x,itol_);
715 // }
716 // else {
717 // secant_->applyB(hv,*tmp,x);
718 // }
719 // con.pruneActive(hv,xlam,neps_);
720 // }
721 //
722 // /** \brief Apply the inactive components of the preconditioner operator.
723 //
724 // I.e., the components corresponding to \f$\mathcal{I}_k\f$.
725 //
726 // @param[out] hv is the result of applying the preconditioner at @b x to
727 // @b v
728 // @param[in] v is the direction in which we apply the preconditioner
729 // @param[in] x is the current iteration vector \f$x_k\f$
730 // @param[in] xlam is the vector \f$x_k + c\lambda_k\f$
731 // @param[in] obj is the objective function
732 // @param[in] con are the bound constraints
733 // */
734 // void applyInactivePrecond(Vector<Real> &pv, const Vector<Real> &v, const Vector<Real> &x,
735 // const Vector<Real> &xlam, Objective<Real> &obj, BoundConstraint<Real> &con) {
736 // Teuchos::RCP<Vector<Real> > tmp = v.clone();
737 // tmp->set(v);
738 // con.pruneActive(*tmp,xlam,neps_);
739 // obj.precond(pv,*tmp,x,itol_);
740 // con.pruneActive(pv,xlam,neps_);
741 // }
742 //
743 // /** \brief Solve the inactive part of the PDAS optimality system.
744 //
745 // The inactive PDAS optimality system is
746 // \f[
747 // \nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{I}_k}s =
748 // -\nabla f(x_k)_{\mathcal{I}_k}
749 // -\nabla^2 f(x_k)_{\mathcal{I}_k,\mathcal{A}_k} (s_k)_{\mathcal{A}_k}.
750 // \f]
751 // Since the inactive part of the Hessian may not be positive definite, we solve
752 // using CR.
753 //
754 // @param[out] sol is the vector containing the solution
755 // @param[in] rhs is the right-hand side vector
756 // @param[in] xlam is the vector \f$x_k + c\lambda_k\f$
757 // @param[in] x is the current iteration vector \f$x_k\f$
758 // @param[in] obj is the objective function
759 // @param[in] con are the bound constraints
760 // */
761 // // Solve the inactive part of the optimality system using conjugate residuals
762 // void solve(Vector<Real> &sol, const Vector<Real> &rhs, const Vector<Real> &xlam, const Vector<Real> &x,
763 // Objective<Real> &obj, BoundConstraint<Real> &con) {
764 // // Initialize Residual
765 // Teuchos::RCP<Vector<Real> > res = rhs.clone();
766 // res->set(rhs);
767 // Real rnorm = res->norm();
768 // Real rtol = std::min(tol1_,tol2_*rnorm);
769 // if ( false ) { itol_ = rtol/(maxitCR_*rnorm); }
770 // sol.zero();
771 //
772 // // Apply preconditioner to residual r = Mres
773 // Teuchos::RCP<Vector<Real> > r = x.clone();
774 // applyInactivePrecond(*r,*res,x,xlam,obj,con);
775 //
776 // // Initialize direction p = v
777 // Teuchos::RCP<Vector<Real> > p = x.clone();
778 // p->set(*r);
779 //
780 // // Apply Hessian to v
781 // Teuchos::RCP<Vector<Real> > Hr = x.clone();
782 // applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
783 //
784 // // Apply Hessian to p
785 // Teuchos::RCP<Vector<Real> > Hp = x.clone();
786 // Teuchos::RCP<Vector<Real> > MHp = x.clone();
787 // Hp->set(*Hr);
788 //
789 // iterCR_ = 0;
790 // flagCR_ = 0;
791 //
792 // Real kappa = 0.0, beta = 0.0, alpha = 0.0, tmp = 0.0, rHr = Hr->dot(*r);
793 //
794 // for (iterCR_ = 0; iterCR_ < maxitCR_; iterCR_++) {
795 // // Precondition Hp
796 // applyInactivePrecond(*MHp,*Hp,x,xlam,obj,con);
797 //
798 // kappa = Hp->dot(*MHp); // p' H M H p
799 // alpha = rHr/kappa; // r' M H M r
800 // sol.axpy(alpha,*p); // update step
801 // res->axpy(-alpha,*Hp); // residual
802 // r->axpy(-alpha,*MHp); // preconditioned residual
803 //
804 // // recompute rnorm and decide whether or not to exit
805 // rnorm = res->norm();
806 // if ( rnorm < rtol ) { break; }
807 //
808 // // Apply Hessian to v
809 // itol_ = rtol/(maxitCR_*rnorm);
810 // applyInactiveHessian(*Hr,*r,x,xlam,obj,con);
811 //
812 // tmp = rHr;
813 // rHr = Hr->dot(*r);
814 // beta = rHr/tmp;
815 // p->scale(beta);
816 // p->axpy(1.0,*r);
817 // Hp->scale(beta);
818 // Hp->axpy(1.0,*Hr);
819 // }
820 // if ( iterCR_ == maxitCR_ ) {
821 // flagCR_ = 1;
822 // }
823 // else {
824 // iterCR_++;
825 // }
826 // }
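
// Illustrative usage sketch (not part of the original file): the step can be
// driven directly through initialize/compute/update, which is what a ROL
// algorithm object does internally.  Here x, obj, and bnd stand for any
// user-supplied Vector<double>, Objective<double>, and BoundConstraint<double>,
// and gtol/maxIter are user-chosen stopping parameters.
//
//   ROL::PrimalDualActiveSetStep<double> step(parlist);
//   ROL::AlgorithmState<double> state;
//   state.iterateVec = x.clone();
//   Teuchos::RCP<ROL::Vector<double> > s = x.clone();
//   Teuchos::RCP<ROL::Vector<double> > g = x.dual().clone();
//   step.initialize(x, *s, *g, obj, bnd, state);
//   while ( state.gnorm > gtol && state.iter < maxIter ) {
//     step.compute(*s, x, obj, bnd, state);
//     step.update(x, *s, obj, bnd, state);
//   }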