CasADiControlProblem.tpp
#pragma once

#include <guanaqo/dl-flags.hpp>
#include <guanaqo/io/csv.hpp>
#include <guanaqo/not-implemented.hpp>

#include <Eigen/Sparse>

#if ALPAQA_WITH_EXTERNAL_CASADI
#include <casadi/core/external.hpp>
#endif

#include <algorithm>
#include <cassert>
#include <concepts>
#include <filesystem>
#include <fstream>
#include <memory>
#include <optional>
#include <stdexcept>
#include <string>
#include <type_traits>

namespace alpaqa {

namespace fs = std::filesystem;

namespace casadi_loader {

using namespace alpaqa::casadi_loader;

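/// Bundle of the CasADi functions that make up a single optimal control
/// problem: the dynamics f with its Jacobian and gradient-vector product, the
/// stage and terminal outputs h and h_N with their costs l and l_N and
/// (Gauss-Newton) derivatives qr, q_N, Q, Q_N, R, S, and the stage and
/// terminal constraints c and c_N with their gradient products and
/// Gauss-Newton Hessians.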
template <Config Conf>
struct CasADiControlFunctionsWithParam {
    USING_ALPAQA_CONFIG(Conf);

    static constexpr bool WithParam = true;

    length_t nx, nu, nh, nh_N, nc, nc_N, p;

    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> f, jac_f;
    CasADiFunctionEvaluator<Conf, 3 + WithParam, 1> grad_f_prod;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> h;
    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> h_N, l, l_N;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> qr, q_N, Q, Q_N, R, S;
    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> c;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> grad_c_prod, gn_hess_c;
    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> c_N;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> grad_c_prod_N, gn_hess_c_N;

    template <class Loader>
        requires requires(Loader &&loader, const char *name) {
            { loader(name) } -> std::same_as<casadi::Function>;
            { loader.format_name(name) } -> std::same_as<std::string>;
        }
    static std::unique_ptr<CasADiControlFunctionsWithParam>
    load(Loader &&loader) {
        length_t nx, nu, nh, nh_N, nc, nc_N, p;
        auto load_f = [&]() -> CasADiFunctionEvaluator<Conf, 3, 1> {
            casadi::Function ffun = loader("f");
            using namespace std::literals::string_literals;
            if (ffun.n_in() != 3)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(ffun.n_in()) + ", should be 3.");
            if (ffun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(ffun.n_out()) + ", should be 1.");
            nx = static_cast<length_t>(ffun.size1_in(0));
            nu = static_cast<length_t>(ffun.size1_in(1));
            p  = static_cast<length_t>(ffun.size1_in(2));
            CasADiFunctionEvaluator<Conf, 3, 1> f{std::move(ffun)};
            f.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
                                  {dim(nx, 1)});
            return f;
        };
        auto load_h = [&]() -> CasADiFunctionEvaluator<Conf, 3, 1> {
            casadi::Function hfun = loader("h");
            using namespace std::literals::string_literals;
            if (hfun.n_in() != 3)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(hfun.n_in()) + ", should be 3.");
            if (hfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(hfun.n_out()) + ", should be 1.");
            nh = static_cast<length_t>(hfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 3, 1> h{std::move(hfun)};
            h.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
                                  {dim(nh, 1)});
            return h;
        };
        auto load_h_N = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
            casadi::Function hfun = loader("h_N");
            using namespace std::literals::string_literals;
            if (hfun.n_in() != 2)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(hfun.n_in()) + ", should be 2.");
            if (hfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(hfun.n_out()) + ", should be 1.");
            nh_N = static_cast<length_t>(hfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 2, 1> h{std::move(hfun)};
            h.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nh_N, 1)});
            return h;
        };
        auto load_c = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
            casadi::Function cfun = loader("c");
            using namespace std::literals::string_literals;
            if (cfun.n_in() != 2)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(cfun.n_in()) + ", should be 2.");
            if (cfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(cfun.n_out()) + ", should be 1.");
            nc = static_cast<length_t>(cfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 2, 1> c{std::move(cfun)};
            c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc, 1)});
            return c;
        };
        auto load_c_N = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
            casadi::Function cfun = loader("c_N");
            using namespace std::literals::string_literals;
            if (cfun.n_in() != 2)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(cfun.n_in()) + ", should be 2.");
            if (cfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(cfun.n_out()) + ", should be 1.");
            nc_N = static_cast<length_t>(cfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 2, 1> c{std::move(cfun)};
            c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc_N, 1)});
            return c;
        };
        // Load the functions "f", "h", "h_N", "c", and "c_N" to determine the
        // unknown dimensions.
        auto f   = wrap_load(loader, "f", load_f);
        auto h   = wrap_load(loader, "h", load_h);
        auto h_N = wrap_load(loader, "h_N", load_h_N);
        auto c   = wrap_load(loader, "c", load_c);
        auto c_N = wrap_load(loader, "c_N", load_c_N);

        auto self = std::make_unique<CasADiControlFunctionsWithParam<Conf>>(
            CasADiControlFunctionsWithParam<Conf>{
                .nx    = nx,
                .nu    = nu,
                .nh    = nh,
                .nh_N  = nh_N,
                .nc    = nc,
                .nc_N  = nc_N,
                .p     = p,
                .f     = std::move(f),
                .jac_f = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "jacobian_f", dims(nx, nu, p),
                    dims(dim(nx, nx + nu))),
                .grad_f_prod =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 4, 1>>(
                        loader, "grad_f_prod", dims(nx, nu, p, nx),
                        dims(nx + nu)),
                .h   = std::move(h),
                .h_N = std::move(h_N),
                .l   = wrapped_load<CasADiFunctionEvaluator<Conf, 2, 1>>(
                    loader, "l", dims(nh, p), dims(1)),
                .l_N = wrapped_load<CasADiFunctionEvaluator<Conf, 2, 1>>(
                    loader, "l_N", dims(nh_N, p), dims(1)),
                .qr  = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "qr", dims(nx + nu, nh, p), dims(nx + nu)),
                .q_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "q_N", dims(nx, nh_N, p), dims(nx)),
                .Q   = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "Q", dims(nx + nu, nh, p), dims(dim{nx, nx})),
                .Q_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "Q_N", dims(nx, nh_N, p), dims(dim{nx, nx})),
                .R   = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "R", dims(nx + nu, nh, p), dims(dim{nu, nu})),
                .S   = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "S", dims(nx + nu, nh, p), dims(dim{nu, nx})),
                .c   = std::move(c),
                .grad_c_prod =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                        loader, "grad_c_prod", dims(nx, p, nc), dims(nx)),
                .gn_hess_c = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "gn_hess_c", dims(nx, p, nc), dims(dim{nx, nx})),
                .c_N = std::move(c_N),
                .grad_c_prod_N =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                        loader, "grad_c_prod_N", dims(nx, p, nc_N), dims(nx)),
                .gn_hess_c_N =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                        loader, "gn_hess_c_N", dims(nx, p, nc_N),
                        dims(dim{nx, nx})),
            });
        return self;
    }
};

} // namespace casadi_loader

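// Loads the CasADi functions listed above from the shared library `filename`
// and deduces all problem dimensions from them. If a CSV file with the same
// name but extension ".csv" exists next to the shared library, the numerical
// problem data (bounds, initial state, parameters, penalty/ALM splits) are
// loaded from it as well (see load_numerical_data below).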
template <Config Conf>
CasADiControlProblem<Conf>::CasADiControlProblem(const std::string &filename,
                                                 length_t N,
                                                 DynamicLoadFlags dl_flags)
    : N{N} {

    struct {
        const std::string &filename;
        DynamicLoadFlags dl_flags;
        auto operator()(const std::string &name) const {
#if ALPAQA_WITH_EXTERNAL_CASADI
            return casadi::external(name, filename);
#else
            return casadi::external(name, filename, dl_flags);
#endif
        }
        auto format_name(const std::string &name) const {
            return filename + ':' + name;
        }
    } loader{filename, dl_flags};
    impl = casadi_loader::CasADiControlFunctionsWithParam<Conf>::load(loader);

    this->nx     = impl->nx;
    this->nu     = impl->nu;
    this->nh     = impl->nh;
    this->nh_N   = impl->nh_N;
    this->nc     = impl->nc;
    this->nc_N   = impl->nc_N;
    this->x_init = vec::Constant(nx, alpaqa::NaN<Conf>);
    this->param  = vec::Constant(impl->p, alpaqa::NaN<Conf>);
    this->U      = Box{nu};
    this->D      = Box{nc};
    this->D_N    = Box{nc_N};

    // Shared work buffer, large enough for the largest sparse output.
    auto n_work = std::max({
        impl->Q.fun.sparsity_out(0).nnz(),
        impl->Q_N.fun.sparsity_out(0).nnz(),
        impl->gn_hess_c.fun.sparsity_out(0).nnz(),
        impl->gn_hess_c_N.fun.sparsity_out(0).nnz(),
    });
    this->work = vec::Constant(static_cast<length_t>(n_work), NaN<Conf>);

    // Load optional numerical data from a CSV file next to the shared library.
    auto bounds_filepath = fs::path{filename}.replace_extension("csv");
    if (fs::exists(bounds_filepath))
        load_numerical_data(bounds_filepath);
}

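// Expected layout of the CSV data file (one row per line, entries separated by
// `sep`, empty lines are skipped and leave the corresponding value unchanged):
//   1. U.lower    (nu entries)
//   2. U.upper    (nu entries)
//   3. D.lower    (nc entries)
//   4. D.upper    (nc entries)
//   5. D_N.lower  (nc_N entries)
//   6. D_N.upper  (nc_N entries)
//   7. x_init     (nx entries)
//   8. param      (p entries)
//   9. penalty_alm_split   (single integer)
//  10. penalty_alm_split_N (single integer)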
template <Config Conf>
void CasADiControlProblem<Conf>::load_numerical_data(
    const std::filesystem::path &filepath, char sep) {
    // Open data file
    std::ifstream data_file{filepath};
    if (!data_file)
        throw std::runtime_error("Unable to open data file \"" +
                                 filepath.string() + '"');

    // Helper function for reading a single line of (float) data
    index_t line = 0;
    auto wrap_data_load = [&](std::string_view name, auto &v,
                              bool fixed_size = true) {
        using namespace guanaqo::io;
        try {
            ++line;
            if (data_file.peek() == '\n') // Ignore empty lines
                return static_cast<void>(data_file.get());
            if (fixed_size) {
                csv_read_row(data_file, as_span(v), sep);
            } else { // Dynamic size
                auto s = csv_read_row_std_vector<real_t>(data_file, sep);
                v      = as_vec(std::span{s});
            }
        } catch (csv_read_error &e) {
            // Transform any errors into something more readable
            throw std::runtime_error("Unable to read " + std::string(name) +
                                     " from data file \"" + filepath.string() +
                                     ':' + std::to_string(line) +
                                     "\": " + e.what());
        }
    };
    // Helper function for reading a single value
    auto read_single = [&](std::string_view name, auto &v) {
        data_file >> v;
        if (!data_file)
            throw std::runtime_error("Unable to read " + std::string(name) +
                                     " from data file \"" + filepath.string() +
                                     ':' + std::to_string(line) + '"');
    };
    wrap_data_load("U.lower", this->U.lower);
    wrap_data_load("U.upper", this->U.upper);
    wrap_data_load("D.lower", this->D.lower);
    wrap_data_load("D.upper", this->D.upper);
    wrap_data_load("D_N.lower", this->D_N.lower);
    wrap_data_load("D_N.upper", this->D_N.upper);
    wrap_data_load("x_init", this->x_init);
    wrap_data_load("param", this->param);
    // The penalty/ALM splits are single integers: constraint components with
    // index below the split are handled using a quadratic penalty, the rest
    // using an augmented Lagrangian; the _N variant applies to the terminal
    // constraint.
    read_single("penalty_alm_split", this->penalty_alm_split);
    read_single("penalty_alm_split_N", this->penalty_alm_split_N);
}

template <Config Conf>
CasADiControlProblem<Conf>::CasADiControlProblem(const CasADiControlProblem &) =
    default;
template <Config Conf>
CasADiControlProblem<Conf> &
CasADiControlProblem<Conf>::operator=(const CasADiControlProblem &) = default;

template <Config Conf>
CasADiControlProblem<Conf>::CasADiControlProblem(
    CasADiControlProblem &&) noexcept = default;
template <Config Conf>
CasADiControlProblem<Conf> &CasADiControlProblem<Conf>::operator=(
    CasADiControlProblem &&) noexcept = default;

template <Config Conf>
CasADiControlProblem<Conf>::~CasADiControlProblem() = default;

template <Config Conf>
void CasADiControlProblem<Conf>::eval_f(index_t timestep, crvec x, crvec u,
                                        rvec fxu) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(fxu.size() == nx);
    impl->f({x.data(), u.data(), param.data()}, {fxu.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_jac_f(index_t timestep, crvec x, crvec u,
                                            rmat J_fxu) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(J_fxu.rows() == nx);
    assert(J_fxu.cols() == nx + nu);
    impl->jac_f({x.data(), u.data(), param.data()}, {J_fxu.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_grad_f_prod(index_t timestep, crvec x,
                                                  crvec u, crvec p,
                                                  rvec grad_fxu_p) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(p.size() == nx);
    assert(grad_fxu_p.size() == nx + nu);
    impl->grad_f_prod({x.data(), u.data(), param.data(), p.data()},
                      {grad_fxu_p.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_h(index_t timestep, crvec x, crvec u,
                                        rvec h) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(h.size() == nh);
    impl->h({x.data(), u.data(), param.data()}, {h.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_h_N(crvec x, rvec h) const {
    assert(x.size() == nx);
    assert(h.size() == nh_N);
    impl->h_N({x.data(), param.data()}, {h.data()});
}
template <Config Conf>
auto CasADiControlProblem<Conf>::eval_l(index_t timestep, crvec h) const
    -> real_t {
    assert(h.size() == nh);
    real_t l;
    impl->l({h.data(), param.data()}, {&l});
    return l;
}
template <Config Conf>
auto CasADiControlProblem<Conf>::eval_l_N(crvec h) const -> real_t {
    assert(h.size() == nh_N);
    real_t l;
    impl->l_N({h.data(), param.data()}, {&l});
    return l;
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_qr(index_t timestep, crvec xu, crvec h,
                                         rvec qr) const {
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(qr.size() == nx + nu);
    impl->qr({xu.data(), h.data(), param.data()}, {qr.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_q_N(crvec x, crvec h, rvec q) const {
    assert(x.size() == nx);
    assert(h.size() == nh_N);
    assert(q.size() == nx);
    impl->q_N({x.data(), h.data(), param.data()}, {q.data()});
}
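// The Hessian-like blocks below are evaluated by CasADi into the `work`
// buffer, using the output sparsity pattern of the corresponding CasADi
// function. The result is then accumulated into the dense output matrix,
// either through a dense Eigen map (when the CasADi output is dense) or
// through an Eigen::Map of a compressed sparse matrix that reuses CasADi's
// column pointers and row indices directly.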
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_Q(index_t timestep, crvec xu, crvec h,
                                            rmat Q) const {
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(Q.rows() == nx);
    assert(Q.cols() == nx);
    impl->Q({xu.data(), h.data(), param.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    auto &&sparse = impl->Q.fun.sparsity_out(0);
    if (sparse.is_dense())
        Q += cmmat{work.data(), nx, nx};
    else
        Q += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_Q_N(crvec x, crvec h, rmat Q) const {
    assert(x.size() == nx);
    assert(h.size() == nh_N);
    assert(Q.rows() == nx);
    assert(Q.cols() == nx);
    impl->Q_N({x.data(), h.data(), param.data()}, {work.data()});
    auto &&sparse = impl->Q_N.fun.sparsity_out(0);
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense())
        Q += cmmat{work.data(), nx, nx};
    else
        Q += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_R_masked(index_t timestep, crvec xu,
                                                   crvec h, crindexvec mask,
                                                   rmat R, rvec work) const {
    auto &&sparse = impl->R.fun.sparsity_out(0);
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(R.rows() <= nu);
    assert(R.cols() <= nu);
    assert(R.rows() == mask.size());
    assert(R.cols() == mask.size());
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->R({xu.data(), h.data(), param.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense()) {
        cmmat R_full{work.data(), nu, nu};
        R += R_full(mask, mask);
    } else {
        cmspmat R_full{
            nu,
            nu,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        util::sparse_add_masked(R_full, R, mask);
    }
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_S_masked(index_t timestep, crvec xu,
                                                   crvec h, crindexvec mask,
                                                   rmat S, rvec work) const {
    auto &&sparse = impl->S.fun.sparsity_out(0);
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(S.rows() <= nu);
    assert(S.rows() == mask.size());
    assert(S.cols() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->S({xu.data(), h.data(), param.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    using Eigen::indexing::all;
    if (sparse.is_dense()) {
        cmmat S_full{work.data(), nu, nx};
        S += S_full(mask, all);
    } else {
        cmspmat S_full{
            nu,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        util::sparse_add_masked_rows(S_full, S, mask);
    }
}

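// Masked products with sub-blocks of R and S: only the rows/columns selected
// by the index vectors mask_J / mask_K participate, i.e.
//   eval_add_R_prod_masked:  out += R(mask_J, mask_K) * v(mask_K)
//   eval_add_S_prod_masked:  out += S(mask_K, :)ᵀ * v(mask_K)
// For sparse outputs, the masked operations are delegated to the alpaqa::util
// sparse helpers instead of materializing the sub-blocks.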
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_R_prod_masked(
    index_t timestep, crvec xu, crvec h, crindexvec mask_J, crindexvec mask_K,
    crvec v, rvec out, rvec work) const {
    auto &&sparse = impl->R.fun.sparsity_out(0);
    assert(v.size() == nu);
    assert(out.size() == mask_J.size());
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense()) {
        auto R = cmmat{work.data(), nu, nu};
        out.noalias() += R(mask_J, mask_K) * v(mask_K);
    } else {
        cmspmat R{
            nu,
            nu,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        // out += R(mask_J,mask_K) * v(mask_K);
        util::sparse_matvec_add_masked_rows_cols(R, v, out, mask_J, mask_K);
    }
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_S_prod_masked(
    index_t timestep, crvec xu, crvec h, crindexvec mask_K, crvec v, rvec out,
    rvec work) const {
    auto &&sparse = impl->S.fun.sparsity_out(0);
    assert(v.size() == nu);
    assert(out.size() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    using Eigen::indexing::all;
    if (sparse.is_dense()) {
        auto Sᵀ = cmmat{work.data(), nu, nx}.transpose();
        out.noalias() += Sᵀ(all, mask_K) * v(mask_K);
    } else {
        cmspmat S{
            nu,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        // out += S(mask_K,:)ᵀ * v(mask_K);
        util::sparse_matvec_add_transpose_masked_rows(S, v, out, mask_K);
    }
}

template <Config Conf>
auto CasADiControlProblem<Conf>::get_R_work_size() const -> length_t {
    auto &&sparse = impl->R.fun.sparsity_out(0);
    return static_cast<length_t>(sparse.nnz());
}

template <Config Conf>
auto CasADiControlProblem<Conf>::get_S_work_size() const -> length_t {
    auto &&sparse = impl->S.fun.sparsity_out(0);
    return static_cast<length_t>(sparse.nnz());
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_constr(index_t timestep, crvec x,
                                             rvec c) const {
    if (nc == 0)
        return;
    assert(x.size() == nx);
    assert(c.size() == nc);
    impl->c({x.data(), param.data()}, {c.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_grad_constr_prod(index_t timestep,
                                                       crvec x, crvec p,
                                                       rvec grad_cx_p) const {
    assert(x.size() == nx);
    assert(p.size() == nc);
    assert(grad_cx_p.size() == nx);
    impl->grad_c_prod({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_gn_hess_constr(index_t timestep,
                                                         crvec x, crvec M,
                                                         rmat out) const {
    auto &&sparse = impl->gn_hess_c.fun.sparsity_out(0);
    assert(x.size() == nx);
    assert(M.size() == nc);
    assert(out.rows() == nx);
    assert(out.cols() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->gn_hess_c({x.data(), param.data(), M.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense())
        out += cmmat{work.data(), nx, nx};
    else
        out += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_constr_N(crvec x, rvec c) const {
    if (nc_N == 0)
        return;
    assert(x.size() == nx);
    assert(c.size() == nc_N);
    impl->c_N({x.data(), param.data()}, {c.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_grad_constr_prod_N(
    crvec x, crvec p, rvec grad_cx_p) const {
    assert(x.size() == nx);
    assert(p.size() == nc_N);
    assert(grad_cx_p.size() == nx);
    impl->grad_c_prod_N({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_gn_hess_constr_N(crvec x, crvec M,
                                                           rmat out) const {
    auto &&sparse = impl->gn_hess_c_N.fun.sparsity_out(0);
    assert(x.size() == nx);
    assert(M.size() == nc_N);
    assert(out.rows() == nx);
    assert(out.cols() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->gn_hess_c_N({x.data(), param.data(), M.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense())
        out += cmmat{work.data(), nx, nx};
    else
        out += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}

} // namespace alpaqa
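
// Usage sketch. The shared library name "problem.so", the horizon length
// N = 30, and the choice of alpaqa::DefaultConfig are illustrative
// assumptions, not prescribed by this file:
//
//     USING_ALPAQA_CONFIG(alpaqa::DefaultConfig);
//     alpaqa::CasADiControlProblem<config_t> problem{"problem.so", /*N=*/30};
//     problem.param.setZero();             // parameter vector p
//     vec x = problem.x_init,              // initial state (from CSV, or NaN)
//         u = vec::Zero(problem.nu), fxu(problem.nx);
//     problem.eval_f(0, x, u, fxu);        // discrete dynamics x⁺ = f(x, u)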