// -- TypeB::GradientAlgorithm::initialize (FRAGMENT) -------------------------
// NOTE(review): the signature head, the closing brace, and several interior
// lines (per the gaps in the embedded line numbering: 79, 82-85, 88, 90, 92,
// 97-99, 101-103, 107, 109-110, 117-119, 121) are outside this view — likely
// the base-class initialize call, evaluation-counter updates, and the
// `if (!useralpha_)` / `if (normAlpha_)` guards. Confirm against the full file.
78 std::ostream &outStream) {
// Lazily build the polyhedral projection onto the bound-constraint set.
80 if (proj_ == nullPtr) {
81 proj_ = makePtr<PolyhedralProjection<Real>>(makePtrFromRef(bnd));
// Tolerance for the objective/gradient evaluations performed here.
86 Real ftol = std::sqrt(ROL_EPSILON<Real>());
// Make the initial guess feasible, then evaluate f(x) and grad f(x).
87 proj_->project(x,outStream);
89 state_->value = obj.
value(x,ftol);
91 obj.
gradient(*state_->gradientVec,x,ftol);
// stepVec <- P(x - dual(g)): the projected (unit-step) gradient point.
93 state_->stepVec->set(x);
94 state_->stepVec->axpy(-one,state_->gradientVec->dual());
95 proj_->project(*state_->stepVec,outStream);
// fnew defaults to f(x); the re-evaluation below is presumably guarded by a
// user-supplied-alpha flag on a line missing from this view.
96 Real fnew = state_->value;
100 fnew = obj.
value(*state_->stepVec,ftol);
// stepVec <- P(x - dual(g)) - x; its norm is the projected-gradient measure.
104 state_->stepVec->axpy(-one,x);
105 state_->gnorm = state_->stepVec->norm();
// No step taken yet, so the step norm is set to infinity.
106 state_->snorm = ROL_INF<Real>();
108 const Real half(0.5);
// Quadratic-interpolation estimate of the initial step size:
// model m(a) = f + a*gs + a^2*denom is minimized at a = -gs/(2*denom).
111 Real gs = state_->stepVec->apply(*state_->gradientVec);
112 Real denom = (fnew - state_->value - gs);
// Remember whether maxAlpha_ was tied to alpha0_ so it can track the update.
113 bool flag = maxAlpha_ == alpha0_;
// Fall back to alpha0bnd_ when the model has (near-)nonpositive curvature,
// and clip small interpolated values up to one.
114 alpha0_ = ((denom > ROL_EPSILON<Real>()) ? -half*gs/denom : alpha0bnd_);
115 alpha0_ = ((alpha0_ > alpha0bnd_) ? alpha0_ : one);
116 if (flag) maxAlpha_ = alpha0_;
// Optional gradient-norm normalization (guard line not visible here).
120 alpha0_ /= state_->gradientVec->norm();
// Seed the line search with the computed initial step size.
122 state_->searchSize = alpha0_;
// -- TypeB::GradientAlgorithm::run (FRAGMENT) --------------------------------
// Projected-gradient method with an Armijo-type line search: optional
// adaptive step-size expansion (useAdapt_) followed by backtracking.
// NOTE(review): the signature head, loop-closing braces, the update() calls,
// and the ls_nfval / nproj / ngrad counter lines fall in the gaps of the
// embedded numbering (131-132, 136, 138-139, 141-142, 145-146, 151, 153, 155,
// 167, 171, 174-175, 177, 183, 185, 187, 197-198, 200, 206-210, 216, 218,
// 220, 230-232, 234-235, 238-250, 252-253, 256, 258-259) — confirm comments
// against the complete file before relying on them.
130 std::ostream &outStream ) {
133 initialize(x,g,obj,bnd,outStream);
// Workspace vector for trial steps / projected-gradient measure.
134 Ptr<Vector<Real>> s = x.
clone();
135 Real ftrial(0), gs(0), ftrialP(0), alphaP(0), tol(std::sqrt(ROL_EPSILON<Real>()));
137 bool incAlpha =
false, accept =
true;
// Optional header row for iteration output.
140 if (verbosity_ > 0) writeOutput(outStream,
true);
// Search direction: primal representation (dual) of the current gradient.
143 state_->stepVec->set(state_->gradientVec->dual());
// Main loop: iterate until the status test signals convergence/termination.
144 while (status_->check(*state_)) {
// Reset step size each iteration unless reusing the previous alpha or adapting.
147 if (!usePrevAlpha_ && !useAdapt_) state_->searchSize = alpha0_;
// Trial iterate: P(x - alpha * step).
148 state_->iterateVec->set(x);
149 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
150 proj_->project(*state_->iterateVec,outStream);
152 ftrial = obj.
value(*state_->iterateVec,tol);
// s is built from the trial iterate; a line missing from this view
// presumably subtracts x so that s is the actual step before gs is formed.
154 s->set(*state_->iterateVec);
156 gs = s->dot(*state_->stepVec);
// Sufficient decrease test: f(x) - f(trial) >= -c1 * <step, s>.
// If it already holds, the adaptive phase may try to ENLARGE alpha.
157 incAlpha = (state_->value - ftrial >= -c1_*gs);
158 if (verbosity_ > 1) {
159 outStream <<
" In TypeB::GradientAlgorithm: Line Search" << std::endl;
160 outStream <<
" Step size: " << state_->searchSize << std::endl;
161 outStream <<
" Trial objective value: " << ftrial << std::endl;
162 outStream <<
" Computed reduction: " << state_->value-ftrial << std::endl;
163 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
164 outStream <<
" Sufficient decrease bound: " << -gs*c1_ << std::endl;
165 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
166 outStream <<
" Increase alpha?: " << incAlpha << std::endl;
// ---- Expansion phase: grow alpha while sufficient decrease keeps holding.
168 if (incAlpha && useAdapt_) {
169 ftrialP = ROL_INF<Real>();
170 while ( state_->value - ftrial >= -c1_*gs
172 && state_->searchSize < maxAlpha_
173 && ls_nfval < maxit_ ) {
// Remember the last accepted alpha before growing by rhoinc_ (capped).
176 alphaP = state_->searchSize;
178 state_->searchSize *= rhoinc_;
179 state_->searchSize = std::min(state_->searchSize,maxAlpha_);
180 state_->iterateVec->set(x);
181 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
182 proj_->project(*state_->iterateVec,outStream);
184 ftrial = obj.
value(*state_->iterateVec,tol);
186 s->set(*state_->iterateVec);
188 gs = s->dot(*state_->stepVec);
189 if (verbosity_ > 1) {
190 outStream << std::endl;
191 outStream <<
" Step size: " << state_->searchSize << std::endl;
192 outStream <<
" Trial objective value: " << ftrial << std::endl;
193 outStream <<
" Computed reduction: " << state_->value-ftrial << std::endl;
194 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
195 outStream <<
" Sufficient decrease bound: " << -gs*c1_ << std::endl;
196 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
// If the last expansion broke sufficient decrease or made f worse than the
// previous trial, revert to the previously stored alpha and its trial point.
199 if (state_->value - ftrial < -c1_*gs || ftrial > ftrialP) {
201 state_->searchSize = alphaP;
202 state_->iterateVec->set(x);
203 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
204 proj_->project(*state_->iterateVec,outStream);
205 s->set(*state_->iterateVec);
// ---- Backtracking phase: shrink alpha by rhodec_ until sufficient decrease.
211 while ( state_->value - ftrial < -c1_*gs && ls_nfval < maxit_ ) {
212 state_->searchSize *= rhodec_;
213 state_->iterateVec->set(x);
214 state_->iterateVec->axpy(-state_->searchSize,*state_->stepVec);
215 proj_->project(*state_->iterateVec,outStream);
217 ftrial = obj.
value(*state_->iterateVec,tol);
219 s->set(*state_->iterateVec);
221 gs = s->dot(*state_->stepVec);
222 if (verbosity_ > 1) {
223 outStream << std::endl;
224 outStream <<
" Step size: " << state_->searchSize << std::endl;
225 outStream <<
" Trial objective value: " << ftrial << std::endl;
226 outStream <<
" Computed reduction: " << state_->value-ftrial << std::endl;
227 outStream <<
" Dot product of gradient and step: " << gs << std::endl;
228 outStream <<
" Sufficient decrease bound: " << -gs*c1_ << std::endl;
229 outStream <<
" Number of function evaluations: " << ls_nfval << std::endl;
// Fold the line-search evaluation count into the cumulative state counter.
233 state_->nfval += ls_nfval;
// Accepted step and its norm.
236 state_->stepVec->set(*s);
237 state_->snorm = state_->stepVec->norm();
// Move to the accepted trial iterate and record its objective value.
240 x.
set(*state_->iterateVec);
244 state_->value = ftrial;
// New gradient at the accepted point; refresh the search direction.
247 obj.
gradient(*state_->gradientVec,x,tol);
251 state_->stepVec->set(state_->gradientVec->dual());
// Projected-gradient convergence measure; the line forming s - x appears to
// be in a gap of this view, so gnorm is presumably ||P(x - dual(g)) - x||.
254 s->set(x); s->axpy(-one,*state_->stepVec);
255 proj_->project(*s,outStream);
257 state_->gnorm = s->norm();
// Per-iteration output row.
260 if (verbosity_ > 0) writeOutput(outStream,writeHeader_);
// -- TypeB::GradientAlgorithm::writeOutput (FRAGMENT) ------------------------
// Formats one row of the iteration history (iter, value, gnorm, snorm,
// searchSize, nfval, ngrad) into a local stringstream. NOTE(review): the
// signature, the `else` that separates the two branches (embedded lines
// 316-319), and the final flush of `hist` to `os` are outside this view.
303 std::stringstream hist;
304 hist << std::scientific << std::setprecision(6);
// First call prints the algorithm name and column header.
305 if ( state_->iter == 0 ) writeName(os);
306 if ( write_header ) writeHeader(os);
// Iteration 0 has no step/step-size yet, so those columns print "---".
307 if ( state_->iter == 0 ) {
309 hist << std::setw(6) << std::left << state_->iter;
310 hist << std::setw(15) << std::left << state_->value;
311 hist << std::setw(15) << std::left << state_->gnorm;
312 hist << std::setw(15) << std::left <<
"---";
313 hist << std::setw(15) << std::left <<
"---";
314 hist << std::setw(10) << std::left << state_->nfval;
315 hist << std::setw(10) << std::left << state_->ngrad;
// Subsequent iterations print the full row including snorm and searchSize.
320 hist << std::setw(6) << std::left << state_->iter;
321 hist << std::setw(15) << std::left << state_->value;
322 hist << std::setw(15) << std::left << state_->gnorm;
323 hist << std::setw(15) << std::left << state_->snorm;
324 hist << std::setw(15) << std::left << state_->searchSize;
325 hist << std::setw(10) << std::left << state_->nfval;
326 hist << std::setw(10) << std::left << state_->ngrad;