%% Setup a toy problem (linear least squares)
n = 5;
m = 2*n;
A = randn(m, n);
xtrue = randn(n, 1);        % ground-truth signal (named xtrue so that the
b = A*xtrue + randn(m, 1);  % cost's argument x below does not shadow it)
problem.M = euclideanfactory(n, 1);     % optimize over R^n, seen as a manifold
problem.cost = @(x) .5*norm(A*x - b)^2; % least-squares cost
problem.grad = @(x) A'*(A*x - b);       % its gradient (Euclidean = Riemannian here)
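% (Optional) Sanity check: Manopt's checkgradient tool compares the gradient
% against finite differences; the log-log plot it produces should show a
% slope of about 2 over several orders of magnitude. Commented out so the
% script runs without interruption:
% checkgradient(problem);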
%% Pick a line search helper for the problem:
% problem.linesearch is a function handle which receives a point x and the
% search direction dx at that point (for steepest descent, dx is the
% negative gradient) and returns a scalar alpha: the first step tried by
% the line search is alpha*dx. If that step satisfies a sufficient
% decrease condition (Armijo), it is accepted; otherwise, it is reduced
% until it passes the test (backtracking) -- unless backtracking is
% disabled (see below).
L = norm(A'*A); % Lipschitz constant of the gradient: largest eigenvalue of A'*A
problem.linesearch = @(x, dx) 1/L; % constant hint: always try the step dx/L first
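% (Variation, not needed for this example) For this quadratic cost, the exact
% minimizer of t -> f(x + t*dx) has a closed form,
%   t* = -(A*dx)'*(A*x - b) / norm(A*dx)^2,
% which for dx = -grad f(x) simplifies to norm(dx)^2 / norm(A*dx)^2. The hint
% could return that instead of the constant 1/L. Left commented out, since the
% verification below assumes the 1/L hint:
% problem.linesearch = @(x, dx) (dx'*dx) / norm(A*dx)^2;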
%% Call a solver, with some options
options.linesearch = @linesearch_hint; % line-search algorithm; optional here: it is picked by default when problem.linesearch is defined
options.ls_backtrack = false; % disable Armijo backtracking: keep the first step tried
options.ls_force_decrease = false; % accept the step even if it increases the cost
[xx, fx, info] = steepestdescent(problem, [], options);
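% (Sketch) Sanity check on the solution: the global minimizer of this
% least-squares cost is the backslash solution A\b. With the constant step
% 1/L, gradient descent converges on this quadratic (since 1/L < 2/L), so
% the distance below should be small if enough iterations were allowed.
x_star = A\b;
fprintf('Distance to the least-squares solution: %g\n', norm(xx - x_star));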
%% Verify that the gradient norm and the step size agree, up to the
% constant factor 1/L returned by problem.linesearch.
ls = [info.linesearch];
semilogy([info.iter], [info.gradnorm] / L, '.-', ...
[info.iter], [[ls.stepsize], NaN], 'o-');
% Above, we append a NaN because no step is taken from the final iterate.
% (Also, iteration 0 has an empty line-search record, which the
% concatenation [info.linesearch] drops, so the lengths match.)
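% (Sketch) Label the plot and check the same identity numerically: each
% recorded step size should equal gradnorm/L at the iterate the step was
% taken from, up to rounding errors.
legend('gradnorm / L', 'stepsize');
xlabel('iteration');
gn = [info.gradnorm];
fprintf('Max |stepsize - gradnorm/L| = %g\n', ...
        max(abs([ls.stepsize] - gn(1:end-1)/L)));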
Best,
Nicolas