alpaqa 1.0.0a11
Nonconvex constrained optimization
Loading...
Searching...
No Matches
gradient-checker.cpp
Go to the documentation of this file.
4#include <alpaqa-version.h>
5
6#include "options.hpp"
7#include "problem.hpp"
8#include "util.hpp"
9
10#ifdef ALPAQA_HAVE_CASADI
11#include <casadi/config.h>
12#endif
13#ifdef WITH_IPOPT
14#include <IpoptConfig.h>
15#endif
16
17#include <algorithm>
18#include <filesystem>
19#include <fstream>
20#include <iostream>
21#include <random>
22#include <span>
23#include <stdexcept>
24#include <string>
25#include <string_view>
26#include <tuple>
27#include <type_traits>
28namespace fs = std::filesystem;
29
31
/// Print the usage line, option documentation, version banner, and
/// third-party library credits for this tool to standard output.
/// @param a0  argv[0], printed verbatim as the program name in the usage line.
void print_usage(const char *a0) {
    const auto *opts = " [<problem-type>:][<path>/]<name> [options...]\n";
    const auto *docs = R"==(
    problem types:
        dl: Dynamically loaded problem using the DLProblem class.
            Specify the prefix of the registration function using the
            problem.prefix option, e.g. problem.prefix=alpaqa_problem will look
            for a registration function with the name alpaqa_problem_register.
            Further options can be passed to the problem using
            problem.<key>[=<value>].
        cs: Load a CasADi problem using the CasADiProblem class.
            If a .tsv file with the same name as the shared library file exists,
            the bounds and parameters will be loaded from that file. See
            CasADiProblem::load_numerical_data for more details.
            The problem parameter can be set using the problem.param option.
        cu: Load a CUTEst problem using the CUTEstProblem class.

    options:
        --full-print
            Print the full gradients.
        --seed=<seed>
            Seed for the random number generator.
    )==";
    std::cout << "alpaqa gradient-checker (" ALPAQA_VERSION_FULL ")\n\n"
                 "    Command-line utility to check problem gradients.\n"
                 "    alpaqa is published under the LGPL-3.0.\n"
                 "    https://github.com/kul-optec/alpaqa"
                 "\n\n"
                 "    Usage: "
              << a0 << opts << docs << std::endl;
    // Third-party credits: CasADi and CUTEst lines are compiled in only when
    // the corresponding support was enabled at build time.
    std::cout
        << "Third-party libraries:\n"
        << "  * Eigen " << EIGEN_WORLD_VERSION << '.' << EIGEN_MAJOR_VERSION
        << '.' << EIGEN_MINOR_VERSION
        << " (https://gitlab.com/libeigen/eigen) - MPL-2.0\n"
#ifdef ALPAQA_HAVE_CASADI
        << "  * CasADi " CASADI_VERSION_STRING
           " (https://github.com/casadi/casadi) - LGPL-3.0-or-later\n"
#endif
#ifdef ALPAQA_HAVE_CUTEST
        << "  * CUTEst"
           " (https://github.com/ralna/CUTEst) - BSD-3-Clause\n"
#endif
        << std::endl;
}
77
/// Split the string @p s on the first occurrence of @p tok.
/// Returns ("", s) if tok was not found.
auto split_once(std::string_view s, char tok = '.') {
    const auto pos = s.find(tok);
    if (pos == std::string_view::npos)
        return std::make_tuple(std::string_view{}, s);
    // Everything before the separator is the key; everything after it
    // (separator excluded) is the remainder.
    return std::make_tuple(s.substr(0, pos), s.substr(pos + 1));
}
88
89auto get_problem_path(const char *const *argv) {
90 bool rel_to_exe = argv[1][0] == '^';
91 std::string_view prob_path_s = argv[1] + static_cast<ptrdiff_t>(rel_to_exe);
92 std::string_view prob_type;
93 std::tie(prob_type, prob_path_s) = split_once(prob_path_s, ':');
94 fs::path prob_path{prob_path_s};
95 if (rel_to_exe)
96 prob_path = fs::canonical(fs::path(argv[0])).parent_path() / prob_path;
97 return std::make_tuple(std::move(prob_path), prob_type);
98}
99
102};
103
/// Compare the problem's analytical derivatives against finite-difference
/// approximations, writing the results to the given output stream.
void check_gradients(LoadedProblem &, std::ostream &,
                     const CheckGradientsOpts &);
106
107int main(int argc, const char *argv[]) try {
108 // Check command line options
109 if (argc < 1)
110 return -1;
111 if (argc == 1)
112 return print_usage(argv[0]), 0;
113 if (argc < 2)
114 return print_usage(argv[0]), -1;
115 std::span args{argv, static_cast<size_t>(argc)};
116 Options opts{argc - 2, argv + 2};
117
118 // Check where to write the output to
119 std::ostream &os = std::cout;
120
121 // Check which problem to load
122 auto [prob_path, prob_type] = get_problem_path(argv);
123
124 // Load problem
125 os << "Loading problem " << prob_path << std::endl;
126 auto problem = load_problem(prob_type, prob_path.parent_path(),
127 prob_path.filename(), opts);
128 os << "Loaded problem " << problem.path.stem().c_str() << " from "
129 << problem.path << "\nnvar: " << problem.problem.get_n()
130 << "\nncon: " << problem.problem.get_m() << "\nProvided functions:\n";
131 alpaqa::print_provided_functions(os, problem.problem);
132 os << std::endl;
133
134 // Options
135 auto has_opt = [&opts](std::string_view o) {
136 auto o_it = std::ranges::find(opts.options(), o);
137 if (o_it == opts.options().end())
138 return false;
139 auto index = static_cast<size_t>(o_it - opts.options().begin());
140 opts.used()[index] = true;
141 return true;
142 };
143 CheckGradientsOpts cg_opts{
144 .print_full = has_opt("--full-print"),
145 };
146
147 // Seed rand
148 auto seed = static_cast<unsigned int>(std::time(nullptr));
149 set_params(seed, "--seed", opts);
150 std::srand(seed);
151
152 // Check options
153 auto used = opts.used();
154 auto unused_opt = std::ranges::find(used, false);
155 auto unused_idx = static_cast<size_t>(unused_opt - used.begin());
156 if (unused_opt != used.end())
157 throw std::invalid_argument("Unused option: " +
158 std::string(opts.options()[unused_idx]));
159
160 // Check gradients
161 check_gradients(problem, os, cg_opts);
162
163} catch (std::exception &e) {
164 std::cerr << "Error: " << demangled_typename(typeid(e)) << ":\n "
165 << e.what() << std::endl;
166 return -1;
167}
168
169vec finite_diff(const std::function<real_t(crvec)> &f, crvec x) {
170 const auto n = x.size();
171 vec grad(n);
172 vec h = vec::Zero(n);
173 const auto ε = 5e-6;
174 const auto δ = 1e-2 * ε;
175 for (index_t i = 0; i < n; ++i) {
176 real_t hh = std::abs(x(i)) * ε > δ ? x(i) * ε : δ;
177 h(i) = hh;
178 grad.coeffRef(i) = (f(x + h) - f(x)) / hh;
179 h(i) = 0;
180 }
181 return grad;
182}
183
184void check_gradients(LoadedProblem &lproblem, std::ostream &log,
185 const CheckGradientsOpts &opts) {
186 auto &te_problem = lproblem.problem;
187
188 auto x0 = lproblem.initial_guess_x;
189 auto y0 = lproblem.initial_guess_y;
190 auto sc = x0.norm();
191 auto n = te_problem.get_n(), m = te_problem.get_m();
192
193 std::srand(static_cast<unsigned int>(std::time(nullptr)));
194 vec Σ = 1.5 * vec::Random(m).array() + 2;
195 vec y = y0 + y0.norm() * vec::Random(m);
196 vec x = x0 + sc * vec::Random(n);
197 vec v = 5e-6 * sc * vec::Random(n);
198 vec gx(m);
199 vec wn(n), wm(m);
200
201 auto print_compare = [&log, &opts](const auto &fd, const auto &ad) {
202 auto abs_err = (fd - ad).template lpNorm<Eigen::Infinity>();
203 auto rel_err = abs_err / fd.template lpNorm<Eigen::Infinity>();
204 log << " abs error = " << alpaqa::float_to_str(abs_err) << '\n';
205 log << " rel error = " << alpaqa::float_to_str(rel_err) << '\n';
206 if (opts.print_full) {
207 alpaqa::print_matlab(log << " fd = ", fd);
208 alpaqa::print_matlab(log << " ad = ", ad) << std::endl;
209 }
210 };
211
212 auto f = [&](crvec x) { return te_problem.eval_f(x); };
213 log << "Gradient verification: ∇f(x)\n";
214 vec fd_grad_f = finite_diff(f, x);
215 vec grad_f(n);
216 te_problem.eval_grad_f(x, grad_f);
217 print_compare(fd_grad_f, grad_f);
218
219 log << "Gradient verification: ∇L(x)\n";
220 auto L = [&](crvec x) {
221 te_problem.eval_g(x, gx);
222 return te_problem.eval_f(x) + gx.dot(y);
223 };
224 vec fd_grad_L = finite_diff(L, x);
225 vec grad_L(n);
226 te_problem.eval_grad_L(x, y, grad_L, wn);
227 print_compare(fd_grad_L, grad_L);
228
229 log << "Gradient verification: ∇ψ(x)\n";
230 auto ψ = [&](crvec x) { return te_problem.eval_ψ(x, y, Σ, wm); };
231 vec fd_grad_ψ = finite_diff(ψ, x);
232 vec grad_ψ(n);
233 te_problem.eval_grad_ψ(x, y, Σ, grad_ψ, wn, wm);
234 print_compare(fd_grad_ψ, grad_ψ);
235
236 if (te_problem.provides_eval_hess_ψ_prod()) {
237 log << "Hessian product verification: ∇²ψ(x)\n";
238 vec grad_ψv(n);
239 vec xv = x + v;
240 te_problem.eval_grad_ψ(xv, y, Σ, grad_ψv, wn, wm);
241 vec fd_hess_ψv = grad_ψv - grad_ψ;
242 vec hess_ψv(n);
243 te_problem.eval_hess_ψ_prod(x, y, Σ, 1, v, hess_ψv);
244 print_compare(fd_hess_ψv, hess_ψv);
245 }
246}
#define USING_ALPAQA_CONFIG(Conf)
Definition: config.hpp:54
std::string demangled_typename(const std::type_info &t)
Get the pretty name of the given type as a string.
auto get_problem_path(const char *const *argv)
void print_usage(const char *a0)
auto split_once(std::string_view s, char tok='.')
Split the string s on the first occurrence of tok.
int main(int argc, const char *argv[])
vec finite_diff(const std::function< real_t(crvec)> &f, crvec x)
void check_gradients(LoadedProblem &, std::ostream &, const CheckGradientsOpts &)
void print_provided_functions(std::ostream &os, const TypeErasedProblem< Conf > &problem)
std::ostream & print_matlab(std::ostream &os, const Eigen::Ref< const Eigen::MatrixX< float > > &M, std::string_view end)
Definition: print.hpp:67
std::string float_to_str(F value, int precision)
Definition: print.tpp:67
decltype(auto) set_params(T &t, std::string_view prefix, Options &opts)
Definition: options.hpp:27
LoadedProblem load_problem(std::string_view type, const fs::path &dir, const fs::path &file, Options &opts)
Definition: problem.cpp:150
vec initial_guess_y
Unknowns.
Definition: problem.hpp:21
vec initial_guess_x
Definition: problem.hpp:20
alpaqa::TypeErasedProblem< config_t > problem
Definition: problem.hpp:15
Double-precision double configuration.
Definition: config.hpp:127