alpaqa pi-pico
Nonconvex constrained optimization
CasADiControlProblem.tpp
1#pragma once
2
11#include "CasADiLoader-util.hpp"
12
13#include <Eigen/Sparse>
14
15#if ALPAQA_WITH_EXTERNAL_CASADI
16#include <casadi/core/external.hpp>
17#endif
18
19#include <fstream>
20#include <memory>
21#include <optional>
22#include <stdexcept>
23#include <type_traits>
24
25namespace alpaqa {
27
28namespace fs = std::filesystem;
29
30namespace casadi_loader {
31
32using namespace alpaqa::casadi_loader;
33
34template <Config Conf>
37
38 static constexpr bool WithParam = true;
47
54
58
62
63 template <class Loader>
64 requires requires(Loader &&loader, const char *name) {
65 { loader(name) } -> std::same_as<casadi::Function>;
66 { loader.format_name(name) } -> std::same_as<std::string>;
67 }
68 static std::unique_ptr<CasADiControlFunctionsWithParam>
70 length_t nx, nu, nh, nh_N, nc, nc_N, p;
73 using namespace std::literals::string_literals;
74 if (ffun.n_in() != 3)
76 "Invalid number of input arguments: got "s +
77 std::to_string(ffun.n_in()) + ", should be 3.");
78 if (ffun.n_out() != 1)
80 "Invalid number of output arguments: got "s +
81 std::to_string(ffun.n_out()) + ", should be 1.");
82 nx = static_cast<length_t>(ffun.size1_in(0));
83 nu = static_cast<length_t>(ffun.size1_in(1));
84 p = static_cast<length_t>(ffun.size1_in(2));
86 f.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
87 {dim(nx, 1)});
88 return f;
89 };
92 using namespace std::literals::string_literals;
93 if (hfun.n_in() != 3)
95 "Invalid number of input arguments: got "s +
96 std::to_string(hfun.n_in()) + ", should be 3.");
97 if (hfun.n_out() != 1)
99 "Invalid number of output arguments: got "s +
100 std::to_string(hfun.n_out()) + ", should be 1.");
101 nh = static_cast<length_t>(hfun.size1_out(0));
103 h.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
104 {dim(nh, 1)});
105 return h;
106 };
109 using namespace std::literals::string_literals;
110 if (hfun.n_in() != 2)
112 "Invalid number of input arguments: got "s +
113 std::to_string(hfun.n_in()) + ", should be 2.");
114 if (hfun.n_out() != 1)
116 "Invalid number of output arguments: got "s +
117 std::to_string(hfun.n_out()) + ", should be 1.");
118 nh_N = static_cast<length_t>(hfun.size1_out(0));
120 h.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nh_N, 1)});
121 return h;
122 };
125 using namespace std::literals::string_literals;
126 if (cfun.n_in() != 2)
128 "Invalid number of input arguments: got "s +
129 std::to_string(cfun.n_in()) + ", should be 2.");
130 if (cfun.n_out() != 1)
132 "Invalid number of output arguments: got "s +
133 std::to_string(cfun.n_out()) + ", should be 1.");
134 nc = static_cast<length_t>(cfun.size1_out(0));
136 c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc, 1)});
137 return c;
138 };
141 using namespace std::literals::string_literals;
142 if (cfun.n_in() != 2)
144 "Invalid number of input arguments: got "s +
145 std::to_string(cfun.n_in()) + ", should be 2.");
146 if (cfun.n_out() != 1)
148 "Invalid number of output arguments: got "s +
149 std::to_string(cfun.n_out()) + ", should be 1.");
150 nc_N = static_cast<length_t>(cfun.size1_out(0));
152 c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc_N, 1)});
153 return c;
154 };
155 // Load the functions "f", "h", "h_N", "c", and "c_N" to determine the unknown dimensions.
156 auto f = wrap_load(loader, "f", load_f);
157 auto h = wrap_load(loader, "h", load_h);
158 auto h_N = wrap_load(loader, "h_N", load_h_N);
159 auto c = wrap_load(loader, "c", load_c);
160 auto c_N = wrap_load(loader, "c_N", load_c_N);
161
162 auto self = std::make_unique<CasADiControlFunctionsWithParam<Conf>>(
164 .nx = nx,
165 .nu = nu,
166 .nh = nh,
167 .nh_N = nh_N,
168 .nc = nc,
169 .nc_N = nc_N,
170 .p = p,
171 .f = std::move(f),
173 loader, "jacobian_f", dims(nx, nu, p),
174 dims(dim(nx, nx + nu))),
175 .grad_f_prod =
177 loader, "grad_f_prod", dims(nx, nu, p, nx),
178 dims(nx + nu)),
179 .h = std::move(h),
180 .h_N = std::move(h_N),
182 loader, "l", dims(nh, p), dims(1)),
184 loader, "l_N", dims(nh_N, p), dims(1)),
186 loader, "qr", dims(nx + nu, nh, p), dims(nx + nu)),
188 loader, "q_N", dims(nx, nh_N, p), dims(nx)),
190 loader, "Q", dims(nx + nu, nh, p), dims(dim{nx, nx})),
192 loader, "Q_N", dims(nx, nh_N, p), dims(dim{nx, nx})),
194 loader, "R", dims(nx + nu, nh, p), dims(dim{nu, nu})),
196 loader, "S", dims(nx + nu, nh, p), dims(dim{nu, nx})),
197 .c = std::move(c),
198 .grad_c_prod =
200 loader, "grad_c_prod", dims(nx, p, nc), dims(nx)),
202 loader, "gn_hess_c", dims(nx, p, nc), dims(dim{nx, nx})),
203 .c_N = std::move(c_N),
204 .grad_c_prod_N =
206 loader, "grad_c_prod_N", dims(nx, p, nc_N), dims(nx)),
207 .gn_hess_c_N =
209 loader, "gn_hess_c_N", dims(nx, p, nc_N),
210 dims(dim{nx, nx})),
211 });
212 return self;
213 }
214};
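// For reference, load() above resolves the following CasADi functions by name
// from the given loader: "f", "jacobian_f", "grad_f_prod", "h", "h_N", "l",
// "l_N", "qr", "q_N", "Q", "Q_N", "R", "S", "c", "grad_c_prod", "gn_hess_c",
// "c_N", "grad_c_prod_N" and "gn_hess_c_N".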
215
216} // namespace casadi_loader
217
218template <Config Conf>
220 length_t N,
221 DynamicLoadFlags dl_flags)
222 : N{N} {
223
224 struct {
225 const std::string &filename;
226 DynamicLoadFlags dl_flags;
227 auto operator()(const std::string &name) const {
228#if ALPAQA_WITH_EXTERNAL_CASADI
229 return casadi::external(name, filename);
230#else
231 return casadi::external(name, filename, dl_flags);
232#endif
233 }
234 auto format_name(const std::string &name) const {
235 return filename + ':' + name;
236 }
237 } loader{filename, dl_flags};
238 impl = casadi_loader::CasADiControlFunctionsWithParam<Conf>::load(loader);
239
240 this->nx = impl->nx;
241 this->nu = impl->nu;
242 this->nh = impl->nh;
243 this->nh_N = impl->nh_N;
244 this->nc = impl->nc;
245 this->nc_N = impl->nc_N;
246 this->x_init = vec::Constant(nx, alpaqa::NaN<Conf>);
247 this->param = vec::Constant(impl->p, alpaqa::NaN<Conf>);
248 this->U = Box{nu};
249 this->D = Box{nc};
250 this->D_N = Box{nc_N};
251
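    // Size a single shared work buffer to the largest output (in non-zeros)
    // among Q, Q_N, gn_hess_c and gn_hess_c_N, so the same buffer can be
    // reused for all of these sparse evaluations.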
252 auto n_work = std::max({
253 impl->Q.fun.sparsity_out(0).nnz(),
254 impl->Q_N.fun.sparsity_out(0).nnz(),
255 impl->gn_hess_c.fun.sparsity_out(0).nnz(),
256 impl->gn_hess_c_N.fun.sparsity_out(0).nnz(),
257 });
258 this->work = vec::Constant(static_cast<length_t>(n_work), NaN<Conf>);
259
260 auto bounds_filepath = fs::path{filename}.replace_extension("csv");
261 if (fs::exists(bounds_filepath))
262 load_numerical_data(bounds_filepath);
263}
264
265template <Config Conf>
267 const std::filesystem::path &filepath, char sep) {
268 // Open data file
269 std::ifstream data_file{filepath};
270 if (!data_file)
271 throw std::runtime_error("Unable to open data file \"" +
272 filepath.string() + '"');
273
274 // Helper function for reading a single line of (float) data
275 index_t line = 0;
276 auto wrap_data_load = [&](std::string_view name, auto &v,
277 bool fixed_size = true) {
278 try {
279 ++line;
280 if (data_file.peek() == '\n') // Ignore empty lines
281 return static_cast<void>(data_file.get());
282 if (fixed_size) {
283 csv::read_row(data_file, v, sep);
284 } else { // Dynamic size
285 auto s = csv::read_row_std_vector<real_t>(data_file, sep);
286 v = cmvec{s.data(), static_cast<index_t>(s.size())};
287 }
288 } catch (csv::read_error &e) {
289 // Transform any errors in something more readable
290 throw std::runtime_error("Unable to read " + std::string(name) +
291 " from data file \"" + filepath.string() +
292 ':' + std::to_string(line) +
293 "\": " + e.what());
294 }
295 };
296 // Helper function for reading a single value
297 auto read_single = [&](std::string_view name, auto &v) {
298 data_file >> v;
299 if (!data_file)
300 throw std::runtime_error("Unable to read " + std::string(name) +
301 " from data file \"" + filepath.string() +
302 ':' + std::to_string(line) + '"');
303 };
304 wrap_data_load("U.lowerbound", this->U.lowerbound);
305 wrap_data_load("U.upperbound", this->U.upperbound);
306 wrap_data_load("D.lowerbound", this->D.lowerbound);
307 wrap_data_load("D.upperbound", this->D.upperbound);
308 wrap_data_load("D_N.lowerbound", this->D_N.lowerbound);
309 wrap_data_load("D_N.upperbound", this->D_N.upperbound);
310 wrap_data_load("x_init", this->x_init);
311 wrap_data_load("param", this->param);
312 // Penalty/ALM split is a single integer
313 read_single("penalty_alm_split", this->penalty_alm_split);
314 read_single("penalty_alm_split_N", this->penalty_alm_split_N);
315}
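// Illustrative sketch of the CSV layout consumed by load_numerical_data above
// (hypothetical values; the real file must match the problem's dimensions):
// one row per quantity, in the order they are read, i.e. U.lowerbound,
// U.upperbound, D.lowerbound, D.upperbound, D_N.lowerbound, D_N.upperbound,
// x_init and param, each a row of numbers separated by the sep character
// (empty lines are skipped), followed by the integers penalty_alm_split and
// penalty_alm_split_N. For example, with nu = 2, nc = nc_N = 1, nx = 3, p = 1:
//
//   -1,-1
//   1,1
//   -0.5
//   0.5
//   -0.5
//   0.5
//   0,0,0
//   2.5
//   0
//   0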
316
317template <Config Conf>
319 default;
320template <Config Conf>
323
324template <Config Conf>
330
333
336 rvec fxu) const {
337 assert(x.size() == nx);
338 assert(u.size() == nu);
339 assert(fxu.size() == nx);
340 impl->f({x.data(), u.data(), param.data()}, {fxu.data()});
341}
342template <Config Conf>
344 rmat J_fxu) const {
345 assert(x.size() == nx);
346 assert(u.size() == nu);
347 assert(J_fxu.rows() == nx);
348 assert(J_fxu.cols() == nx + nu);
349 impl->jac_f({x.data(), u.data(), param.data()}, {J_fxu.data()});
350}
351template <Config Conf>
353 crvec p,
354 rvec grad_fxu_p) const {
355 assert(x.size() == nx);
356 assert(u.size() == nu);
357 assert(p.size() == nx);
358 assert(grad_fxu_p.size() == nx + nu);
359 impl->grad_f_prod({x.data(), u.data(), param.data(), p.data()},
360 {grad_fxu_p.data()});
361}
362template <Config Conf>
364 rvec h) const {
365 assert(x.size() == nx);
366 assert(u.size() == nu);
367 assert(h.size() == nh);
368 impl->h({x.data(), u.data(), param.data()}, {h.data()});
369}
370template <Config Conf>
372 assert(x.size() == nx);
373 assert(h.size() == nh_N);
374 impl->h_N({x.data(), param.data()}, {h.data()});
375}
376template <Config Conf>
378 assert(h.size() == nh);
379 real_t l;
380 impl->l({h.data(), param.data()}, {&l});
381 return l;
382}
383template <Config Conf>
385 assert(h.size() == nh_N);
386 real_t l;
387 impl->l_N({h.data(), param.data()}, {&l});
388 return l;
389}
390template <Config Conf>
392 rvec qr) const {
393 assert(xu.size() == nx + nu);
394 assert(h.size() == nh);
395 assert(qr.size() == nx + nu);
396 impl->qr({xu.data(), h.data(), param.data()}, {qr.data()});
397}
398template <Config Conf>
400 assert(x.size() == nx);
401 assert(h.size() == nh_N);
402 assert(q.size() == nx);
403 impl->q_N({x.data(), h.data(), param.data()}, {q.data()});
404}
405template <Config Conf>
407 rmat Q) const {
408 assert(xu.size() == nx + nu);
409 assert(h.size() == nh);
410 assert(Q.rows() == nx);
411 assert(Q.cols() == nx);
412 impl->Q({xu.data(), h.data(), param.data()}, {work.data()});
413 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
414 using cmspmat = Eigen::Map<const spmat>;
415 auto &&sparse = impl->Q.fun.sparsity_out(0);
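    // CasADi returns the result in `work` using the output's sparsity
    // pattern: dense results are mapped as a full nx-by-nx matrix, while
    // sparse results are wrapped in an Eigen::Map of a column-major sparse
    // matrix built directly from CasADi's compressed-column arrays
    // (colind/row) without copying, and then accumulated into Q.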
416 if (sparse.is_dense())
417 Q += cmmat{work.data(), nx, nx};
418 else
419 Q += cmspmat{
420 nx,
421 nx,
422 static_cast<length_t>(sparse.nnz()),
423 sparse.colind(),
424 sparse.row(),
425 work.data(),
426 };
427}
428template <Config Conf>
430 assert(x.size() == nx);
431 assert(h.size() == nh_N);
432 assert(Q.rows() == nx);
433 assert(Q.cols() == nx);
434 impl->Q_N({x.data(), h.data(), param.data()}, {work.data()});
435 auto &&sparse = impl->Q_N.fun.sparsity_out(0);
436 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
437 using cmspmat = Eigen::Map<const spmat>;
438 if (sparse.is_dense())
439 Q += cmmat{work.data(), nx, nx};
440 else
441 Q += cmspmat{
442 nx,
443 nx,
444 static_cast<length_t>(sparse.nnz()),
445 sparse.colind(),
446 sparse.row(),
447 work.data(),
448 };
449}
450
451template <Config Conf>
454 rvec work) const {
455 auto &&sparse = impl->R.fun.sparsity_out(0);
456 assert(xu.size() == nx + nu);
457 assert(h.size() == nh);
458 assert(R.rows() <= nu);
459 assert(R.cols() <= nu);
460 assert(R.rows() == mask.size());
461 assert(R.cols() == mask.size());
462 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
463 impl->R({xu.data(), h.data(), param.data()}, {work.data()});
464 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
465 using cmspmat = Eigen::Map<const spmat>;
466 if (sparse.is_dense()) {
467 cmmat R_full{work.data(), nu, nu};
468 R += R_full(mask, mask);
469 } else {
471 nu,
472 nu,
473 static_cast<length_t>(sparse.nnz()),
474 sparse.colind(),
475 sparse.row(),
476 work.data(),
477 };
479 }
480}
481
482template <Config Conf>
485 rvec work) const {
486 auto &&sparse = impl->S.fun.sparsity_out(0);
487 assert(xu.size() == nx + nu);
488 assert(h.size() == nh);
489 assert(S.rows() <= nu);
490 assert(S.rows() == mask.size());
491 assert(S.cols() == nx);
492 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
493 impl->S({xu.data(), h.data(), param.data()}, {work.data()});
494 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
495 using cmspmat = Eigen::Map<const spmat>;
496 using Eigen::indexing::all;
497 if (sparse.is_dense()) {
498 cmmat S_full{work.data(), nu, nx};
499 S += S_full(mask, all);
500 } else {
502 nu,
503 nx,
504 static_cast<length_t>(sparse.nnz()),
505 sparse.colind(),
506 sparse.row(),
507 work.data(),
508 };
510 }
511}
512
513template <Config Conf>
517 crvec v, rvec out,
518 rvec work) const {
519 auto &&sparse = impl->R.fun.sparsity_out(0);
520 assert(v.size() == nu);
521 assert(out.size() == mask_J.size());
522 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
523 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
524 using cmspmat = Eigen::Map<const spmat>;
525 if (sparse.is_dense()) {
526 auto R = cmmat{work.data(), nu, nu};
527 out.noalias() += R(mask_J, mask_K) * v(mask_K);
528 } else {
529 cmspmat R{
530 nu,
531 nu,
532 static_cast<length_t>(sparse.nnz()),
533 sparse.colind(),
534 sparse.row(),
535 work.data(),
536 };
537 // out += R_full(mask_J,mask_K) * v(mask_K);
539 }
540}
541
542template <Config Conf>
545 crvec v, rvec out,
546 rvec work) const {
547 auto &&sparse = impl->S.fun.sparsity_out(0);
548 assert(v.size() == nu);
549 assert(out.size() == nx);
550 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
551 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
552 using cmspmat = Eigen::Map<const spmat>;
553 using Eigen::indexing::all;
554 if (sparse.is_dense()) {
555 auto Sᵀ = cmmat{work.data(), nu, nx}.transpose();
556 out.noalias() += Sᵀ(all, mask_K) * v(mask_K);
557 } else {
558 cmspmat S{
559 nu,
560 nx,
561 static_cast<length_t>(sparse.nnz()),
562 sparse.colind(),
563 sparse.row(),
564 work.data(),
565 };
566 // out += S(mask_K,:)ᵀ * v(mask_K);
568 }
569}
570
571template <Config Conf>
573 auto &&sparse = impl->R.fun.sparsity_out(0);
574 return static_cast<length_t>(sparse.nnz());
575}
576
577template <Config Conf>
579 auto &&sparse = impl->S.fun.sparsity_out(0);
580 return static_cast<length_t>(sparse.nnz());
581}
582
583template <Config Conf>
585 if (nc == 0)
586 return;
587 assert(x.size() == nx);
588 assert(c.size() == nc);
589 impl->c({x.data(), param.data()}, {c.data()});
590}
591
592template <Config Conf>
594 crvec p,
595 rvec grad_cx_p) const {
596 assert(x.size() == nx);
597 assert(p.size() == nc);
598 assert(grad_cx_p.size() == nx);
599 impl->grad_c_prod({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
600}
601
602template <Config Conf>
604 crvec M,
605 rmat out) const {
606 auto &&sparse = impl->gn_hess_c.fun.sparsity_out(0);
607 assert(x.size() == nx);
608 assert(M.size() == nc);
609 assert(out.rows() == nx);
610 assert(out.cols() == nx);
611 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
612 impl->gn_hess_c({x.data(), param.data(), M.data()}, {work.data()});
613 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
614 using cmspmat = Eigen::Map<const spmat>;
615 if (sparse.is_dense())
616 out += cmmat{work.data(), nx, nx};
617 else
618 out += cmspmat{
619 nx,
620 nx,
621 static_cast<length_t>(sparse.nnz()),
622 sparse.colind(),
623 sparse.row(),
624 work.data(),
625 };
626}
627
628template <Config Conf>
630 if (nc_N == 0)
631 return;
632 assert(x.size() == nx);
633 assert(c.size() == nc_N);
634 impl->c_N({x.data(), param.data()}, {c.data()});
635}
636
637template <Config Conf>
639 rvec grad_cx_p) const {
640 assert(x.size() == nx);
641 assert(p.size() == nc_N);
642 assert(grad_cx_p.size() == nx);
643 impl->grad_c_prod_N({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
644}
645
646template <Config Conf>
648 rmat out) const {
649 auto &&sparse = impl->gn_hess_c_N.fun.sparsity_out(0);
650 assert(x.size() == nx);
651 assert(M.size() == nc_N);
652 assert(out.rows() == nx);
653 assert(out.cols() == nx);
654 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
655 impl->gn_hess_c_N({x.data(), param.data(), M.data()}, {work.data()});
656 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
657 using cmspmat = Eigen::Map<const spmat>;
658 if (sparse.is_dense())
659 out += cmmat{work.data(), nx, nx};
660 else
661 out += cmspmat{
662 nx,
663 nx,
664 static_cast<length_t>(sparse.nnz()),
665 sparse.colind(),
666 sparse.row(),
667 work.data(),
668 };
669}
670
672} // namespace alpaqa
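A minimal usage sketch follows (assumptions: the header path <alpaqa/casadi/CasADiControlProblem.hpp>, the alpaqa::DefaultConfig configuration and its vec typedef, public access to the nx, nu and x_init members, and the illustrative file name "problem.so" with horizon length 30). The constructor, the CSV convention and eval_f are the ones defined in the listing above.

#include <alpaqa/casadi/CasADiControlProblem.hpp> // assumed header location

#include <iostream>

int main() {
    using Problem = alpaqa::CasADiControlProblem<alpaqa::DefaultConfig>;
    using vec     = alpaqa::DefaultConfig::vec;
    // Load the pre-compiled CasADi functions for a horizon of 30 time steps.
    // If "problem.csv" exists next to "problem.so", the constructor also
    // reads the numerical data (bounds and parameters) from it.
    Problem problem{"problem.so", 30, {}};
    // Evaluate the discrete-time dynamics f(x, u) at the first time step.
    vec x = problem.x_init;
    vec u = vec::Zero(problem.nu);
    vec fxu(problem.nx);
    problem.eval_f(0, x, u, fxu);
    std::cout << "f(x_0, u_0) = " << fxu.transpose() << '\n';
    return 0;
}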