Unconstrained Optimization
Mathwrist Code Example Series
(October 5, 2023)
Abstract
This document gives some code examples on how to use the unconstrained optimization features in Mathwrist’s C++ Numerical Programming Library (NPL).
1 Rosenbrock Function
This example implements the Rosenbrock test objective function by deriving from the FunctionND interface. It has its minimum at $(x_1, x_2) = (1, 1)$.
1
// Rosenbrock’s function for unconstrained optimization.
2
struct Rosenbrock : public FunctionND
3
{
4
// Clone is not used in this example.
5
FunctionND* clone() const { return 0; }
7
double eval(const Matrix& x, Matrix* _g=0, Matrix* _H=0) const
8
{
9
double x1 = x(0, 0);
10
double x2 = x(1, 0);
11
const double retval = 100 * std::pow(x2 - x1 * x1, 2) +
12
std::pow(1 - x1, 2);
14
if (_g != 0)
15
{
16
(*_g)(0, 0) = 400 * (x1 * x1 * x1 - x1 * x2) + 2 * x1 - 2;
17
(*_g)(1, 0) = 200 * (x2 - x1 * x1);
18
}
20
if (_H != 0)
21
{
22
(*_H)(0, 0) = 1200 * x1 * x1 - 400 * x2 + 2;
23
(*_H)(0, 1) = -400 * x1;
24
(*_H)(1, 0) = -400 * x1;
25
(*_H)(1, 1) = 200;
26
}
28
return retval;
29
}
31
size_t dimension_size() const { return 2; }
32
};
2 Line Search Method
This example illustrates solving an n-dimensional unconstrained optimization problem using line search methods.
1
Rosenbrock f;
3
Matrix x(2, 1);
5
// Modified Newton method
6
{
7
LineSearchNewton min_search(f);
8
min_search.set_converge_tolerance(1.0e-10);
9
min_search.set_step_tolerance(1.e-9);
10
min_search.set_max_iter(50);
11
min_search.enable_watch();
13
// Initial guess
14
x(0, 0) = -2;
15
x(1, 0) = 2;
17
// max step move dx = 0.5
18
double y = min_search(0.5, x);
19
ftl::Assert(std::abs(x(0, 0) - 1) < 1.e-8);
20
ftl::Assert(std::abs(x(1, 0) - 1) < 1.e-8);
21
ftl::Assert(std::abs(y) < 1.e-8);
22
}
24
// Quasi-Newton method
25
{
26
LineSearchQuasiNewton min_search(f);
27
min_search.set_converge_tolerance(1.0e-7);
28
min_search.set_step_tolerance(1.e-9);
29
min_search.set_max_iter(50);
31
// Initial guess
32
x(0, 0) = 2;
33
x(1, 0) = -2;
35
// max step move dx = 0.5
36
double y = min_search(0.5, x);
37
ftl::Assert(std::abs(x(0, 0) - 1) < 1.e-6);
38
ftl::Assert(std::abs(x(1, 0) - 1) < 1.e-6);
39
ftl::Assert(std::abs(y) < 1.e-10);
40
}
3 Trust Region
This example illustrates solving an n-dimensional unconstrained optimization problem using trust region methods.
1
Rosenbrock f;
2
Matrix x(2, 1);
4
// Test nearly-exact method
5
{
6
// max radius = 0.5
7
TrustRegionExact min_search(f, 0.5);
9
min_search.set_converge_tolerance(1.0e-8);
10
min_search.set_max_iter(50);
12
// Initial guess
13
x(0, 0) = -1.9;
14
x(1, 0) = 2;
16
// Initial trust region radius = 0.02
17
double y = min_search(0.02, x);
19
ftl::Assert(std::abs(x(0, 0) - 1) < 1.e-8);
20
ftl::Assert(std::abs(x(1, 0) - 1) < 1.e-8);
21
ftl::Assert(std::abs(y) < 1.e-10);
22
}
24
// Test conjugate gradient method
25
{
26
// max radius = 0.5
27
TrustRegionSteihaug min_search(f, 0.5);
29
min_search.set_converge_tolerance(1.0e-8);
30
min_search.set_max_iter(50);
32
// Initial guess
33
x(0, 0) = -1.9;
34
x(1, 0) = 2;
36
// Initial trust region radius = 0.02
37
double y = min_search(0.02, x);
39
ftl::Assert(std::abs(x(0, 0) - 1) < 1.e-8);
40
ftl::Assert(std::abs(x(1, 0) - 1) < 1.e-8);
41
ftl::Assert(std::abs(y) < 1.e-10);
42
}