alpaqa 1.0.0a15
Nonconvex constrained optimization
CasADiControlProblem.tpp
#pragma once

// [alpaqa CasADi control-problem, function-wrapper, CSV and sparse-ops includes]
#include <filesystem>
#include <fstream>

#include <Eigen/Sparse>
#include <casadi/core/external.hpp>

#include <memory>
#include <optional>
#include <stdexcept>
#include <type_traits>

namespace alpaqa {

namespace fs = std::filesystem;

namespace casadi_loader {

using namespace alpaqa::casadi_loader;

template <Config Conf>
struct CasADiControlFunctionsWithParam {
    USING_ALPAQA_CONFIG(Conf);

    static constexpr bool WithParam = true;
    length_t nx, nu, nh, nh_N, nc, nc_N, p;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> f;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> jac_f;
    CasADiFunctionEvaluator<Conf, 3 + WithParam, 1> grad_f_prod;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> h;
    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> h_N;
    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> l;
    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> l_N;

    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> qr;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> q_N;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> Q;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> Q_N;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> R;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> S;

    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> c;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> grad_c_prod;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> gn_hess_c;

    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> c_N;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> grad_c_prod_N;
    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> gn_hess_c_N;
    template <class Loader>
        requires requires(Loader &&loader, const char *name) {
            { loader(name) } -> std::same_as<casadi::Function>;
            { loader.format_name(name) } -> std::same_as<std::string>;
        }
    static std::unique_ptr<CasADiControlFunctionsWithParam>
    load(Loader &&loader) {
        length_t nx, nu, nh, nh_N, nc, nc_N, p;
        auto load_f = [&]() -> CasADiFunctionEvaluator<Conf, 3, 1> {
            casadi::Function ffun = loader("f");
            using namespace std::literals::string_literals;
            if (ffun.n_in() != 3)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(ffun.n_in()) + ", should be 3.");
            if (ffun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(ffun.n_out()) + ", should be 1.");
            nx = static_cast<length_t>(ffun.size1_in(0));
            nu = static_cast<length_t>(ffun.size1_in(1));
            p  = static_cast<length_t>(ffun.size1_in(2));
            CasADiFunctionEvaluator<Conf, 3, 1> f{std::move(ffun)};
            f.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
                                  {dim(nx, 1)});
            return f;
        };
        auto load_h = [&]() -> CasADiFunctionEvaluator<Conf, 3, 1> {
            casadi::Function hfun = loader("h");
            using namespace std::literals::string_literals;
            if (hfun.n_in() != 3)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(hfun.n_in()) + ", should be 3.");
            if (hfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(hfun.n_out()) + ", should be 1.");
            nh = static_cast<length_t>(hfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 3, 1> h{std::move(hfun)};
            h.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
                                  {dim(nh, 1)});
            return h;
        };
        auto load_h_N = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
            casadi::Function hfun = loader("h_N");
            using namespace std::literals::string_literals;
            if (hfun.n_in() != 2)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(hfun.n_in()) + ", should be 2.");
            if (hfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(hfun.n_out()) + ", should be 1.");
            nh_N = static_cast<length_t>(hfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 2, 1> h{std::move(hfun)};
            h.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nh_N, 1)});
            return h;
        };
        auto load_c = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
            casadi::Function cfun = loader("c");
            using namespace std::literals::string_literals;
            if (cfun.n_in() != 2)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(cfun.n_in()) + ", should be 2.");
            if (cfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(cfun.n_out()) + ", should be 1.");
            nc = static_cast<length_t>(cfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 2, 1> c{std::move(cfun)};
            c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc, 1)});
            return c;
        };
        auto load_c_N = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
            casadi::Function cfun = loader("c_N");
            using namespace std::literals::string_literals;
            if (cfun.n_in() != 2)
                throw std::invalid_argument(
                    "Invalid number of input arguments: got "s +
                    std::to_string(cfun.n_in()) + ", should be 2.");
            if (cfun.n_out() != 1)
                throw std::invalid_argument(
                    "Invalid number of output arguments: got "s +
                    std::to_string(cfun.n_out()) + ", should be 1.");
            nc_N = static_cast<length_t>(cfun.size1_out(0));
            CasADiFunctionEvaluator<Conf, 2, 1> c{std::move(cfun)};
            c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc_N, 1)});
            return c;
        };
        // Load the functions "f", "h", and "c" to determine the unknown
        // dimensions.
        auto f   = wrap_load(loader, "f", load_f);
        auto h   = wrap_load(loader, "h", load_h);
        auto h_N = wrap_load(loader, "h_N", load_h_N);
        auto c   = wrap_load(loader, "c", load_c);
        auto c_N = wrap_load(loader, "c_N", load_c_N);

        auto self = std::make_unique<CasADiControlFunctionsWithParam<Conf>>(
            CasADiControlFunctionsWithParam<Conf>{
                .nx   = nx,
                .nu   = nu,
                .nh   = nh,
                .nh_N = nh_N,
                .nc   = nc,
                .nc_N = nc_N,
                .p    = p,
                .f    = std::move(f),
                .jac_f = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "jacobian_f", dims(nx, nu, p),
                    dims(dim(nx, nx + nu))),
                .grad_f_prod =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 4, 1>>(
                        loader, "grad_f_prod", dims(nx, nu, p, nx),
                        dims(nx + nu)),
                .h   = std::move(h),
                .h_N = std::move(h_N),
                .l = wrapped_load<CasADiFunctionEvaluator<Conf, 2, 1>>(
                    loader, "l", dims(nh, p), dims(1)),
                .l_N = wrapped_load<CasADiFunctionEvaluator<Conf, 2, 1>>(
                    loader, "l_N", dims(nh_N, p), dims(1)),
                .qr = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "qr", dims(nx + nu, nh, p), dims(nx + nu)),
                .q_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "q_N", dims(nx, nh_N, p), dims(nx)),
                .Q = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "Q", dims(nx + nu, nh, p), dims(dim{nx, nx})),
                .Q_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "Q_N", dims(nx, nh_N, p), dims(dim{nx, nx})),
                .R = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "R", dims(nx + nu, nh, p), dims(dim{nu, nu})),
                .S = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "S", dims(nx + nu, nh, p), dims(dim{nu, nx})),
                .c = std::move(c),
                .grad_c_prod =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                        loader, "grad_c_prod", dims(nx, p, nc), dims(nx)),
                .gn_hess_c = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                    loader, "gn_hess_c", dims(nx, p, nc), dims(dim{nx, nx})),
                .c_N = std::move(c_N),
                .grad_c_prod_N =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                        loader, "grad_c_prod_N", dims(nx, p, nc_N), dims(nx)),
                .gn_hess_c_N =
                    wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
                        loader, "gn_hess_c_N", dims(nx, p, nc_N),
                        dims(dim{nx, nx})),
            });
        return self;
    }
};

} // namespace casadi_loader
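The Loader passed to load() only has to be callable with a function name, returning the corresponding casadi::Function, and to provide a format_name() member used in error messages (see the requires-clause above). As an illustrative sketch that is not part of the library, a loader that deserializes functions previously saved with CasADi's Function::save could look like this (DirLoader and the ".casadi" extension are hypothetical):

#include <casadi/casadi.hpp>
#include <filesystem>
#include <string>

struct DirLoader {
    std::filesystem::path dir;
    // Return the CasADi function called `name`, deserialized from disk.
    casadi::Function operator()(const std::string &name) const {
        return casadi::Function::load((dir / (name + ".casadi")).string());
    }
    // Human-readable identifier used when reporting loading errors.
    std::string format_name(const std::string &name) const {
        return dir.string() + '/' + name;
    }
};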
template <Config Conf>
CasADiControlProblem<Conf>::CasADiControlProblem(const std::string &filename,
                                                 length_t N)
    : N{N} {

    struct {
        const std::string &filename;
        auto operator()(const std::string &name) const {
            return casadi::external(name, filename);
        }
        auto format_name(const std::string &name) const {
            return filename + ':' + name;
        }
    } loader{filename};
    impl = casadi_loader::CasADiControlFunctionsWithParam<Conf>::load(loader);

    this->nx     = impl->nx;
    this->nu     = impl->nu;
    this->nh     = impl->nh;
    this->nh_N   = impl->nh_N;
    this->nc     = impl->nc;
    this->nc_N   = impl->nc_N;
    this->x_init = vec::Constant(nx, alpaqa::NaN<Conf>);
    this->param  = vec::Constant(impl->p, alpaqa::NaN<Conf>);
    this->U      = Box{nu};
    this->D      = Box{nc};
    this->D_N    = Box{nc_N};

    auto n_work = std::max({
        impl->Q.fun.sparsity_out(0).nnz(),
        impl->Q_N.fun.sparsity_out(0).nnz(),
        impl->gn_hess_c.fun.sparsity_out(0).nnz(),
        impl->gn_hess_c_N.fun.sparsity_out(0).nnz(),
    });
    this->work = vec::Constant(static_cast<length_t>(n_work), NaN<Conf>);

    auto bounds_filepath = fs::path{filename}.replace_extension("csv");
    if (fs::exists(bounds_filepath))
        load_numerical_data(bounds_filepath);
}
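In other words, the constructor loads every CasADi function by name from the given shared library through casadi::external, copies the problem dimensions, allocates the bounds, parameter and work vectors, and finally reads numerical data from a CSV file with the same base name if one exists. A minimal usage sketch follows; the header path, the DefaultConfig alias and the library name "ocp_functions.so" are assumptions, not part of this file:

#include <alpaqa/casadi/CasADiControlProblem.hpp> // assumed public header path

int main() {
    // "ocp_functions.so" is a placeholder for a CasADi code-generated library
    // exporting f, jacobian_f, grad_f_prod, h, h_N, l, l_N, qr, q_N, Q, Q_N,
    // R, S, c, grad_c_prod, gn_hess_c, c_N, grad_c_prod_N and gn_hess_c_N.
    alpaqa::CasADiControlProblem<alpaqa::DefaultConfig> problem{"ocp_functions.so", 30};
    problem.U.lowerbound.setConstant(-1); // input bounds
    problem.U.upperbound.setConstant(+1);
    problem.param.setConstant(0.5);       // runtime parameter vector of size p
}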
template <Config Conf>
void CasADiControlProblem<Conf>::load_numerical_data(
    const std::filesystem::path &filepath, char sep) {
    // Open data file
    std::ifstream data_file{filepath};
    if (!data_file)
        throw std::runtime_error("Unable to open data file \"" +
                                 filepath.string() + '"');

    // Helper function for reading a single line of (float) data
    index_t line = 0;
    auto wrap_data_load = [&](std::string_view name, auto &v,
                              bool fixed_size = true) {
        try {
            ++line;
            if (data_file.peek() == '\n') // Ignore empty lines
                return static_cast<void>(data_file.get());
            if (fixed_size) {
                csv::read_row(data_file, v, sep);
            } else { // Dynamic size
                auto s = csv::read_row_std_vector<real_t>(data_file, sep);
                v      = cmvec{s.data(), static_cast<index_t>(s.size())};
            }
        } catch (csv::read_error &e) {
            // Transform any errors into something more readable
            throw std::runtime_error("Unable to read " + std::string(name) +
                                     " from data file \"" + filepath.string() +
                                     ':' + std::to_string(line) +
                                     "\": " + e.what());
        }
    };
    // Helper function for reading a single value
    auto read_single = [&](std::string_view name, auto &v) {
        data_file >> v;
        if (!data_file)
            throw std::runtime_error("Unable to read " + std::string(name) +
                                     " from data file \"" + filepath.string() +
                                     ':' + std::to_string(line) + '"');
    };
    wrap_data_load("U.lowerbound", this->U.lowerbound);
    wrap_data_load("U.upperbound", this->U.upperbound);
    wrap_data_load("D.lowerbound", this->D.lowerbound);
    wrap_data_load("D.upperbound", this->D.upperbound);
    wrap_data_load("D_N.lowerbound", this->D_N.lowerbound);
    wrap_data_load("D_N.upperbound", this->D_N.upperbound);
    wrap_data_load("x_init", this->x_init);
    wrap_data_load("param", this->param);
    // The penalty/ALM splits are read as single integers
    read_single("penalty_alm_split", this->penalty_alm_split);
    read_single("penalty_alm_split_N", this->penalty_alm_split_N);
}
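Consequently, the CSV file is expected to contain one row per quantity, in exactly the order read above, followed by the two penalty/ALM split integers; empty rows are skipped and leave the corresponding vectors untouched. A hypothetical example that writes such a file for an OCP with nu = 2 inputs, nx = 4 states, one parameter, and no constraints (nc = nc_N = 0):

#include <fstream>

// Illustrative only: the dimensions below must match the loaded problem.
void write_example_csv(const char *path) {
    std::ofstream csv{path};
    csv << "-1,-1\n"    // U.lowerbound
        << "1,1\n"      // U.upperbound
        << "\n"         // D.lowerbound   (nc = 0, row left empty)
        << "\n"         // D.upperbound   (nc = 0)
        << "\n"         // D_N.lowerbound (nc_N = 0)
        << "\n"         // D_N.upperbound (nc_N = 0)
        << "0,0,0,0\n"  // x_init
        << "0.5\n"      // param
        << "0\n"        // penalty_alm_split
        << "0\n";       // penalty_alm_split_N
}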
template <Config Conf>
CasADiControlProblem<Conf>::CasADiControlProblem(const CasADiControlProblem &) =
    default;
template <Config Conf>
CasADiControlProblem<Conf> &
CasADiControlProblem<Conf>::operator=(const CasADiControlProblem &) = default;

template <Config Conf>
CasADiControlProblem<Conf>::CasADiControlProblem(CasADiControlProblem &&) noexcept =
    default;
template <Config Conf>
CasADiControlProblem<Conf> &
CasADiControlProblem<Conf>::operator=(CasADiControlProblem &&) noexcept = default;

template <Config Conf>
CasADiControlProblem<Conf>::~CasADiControlProblem() = default;

template <Config Conf>
void CasADiControlProblem<Conf>::eval_f(index_t, crvec x, crvec u,
                                        rvec fxu) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(fxu.size() == nx);
    impl->f({x.data(), u.data(), param.data()}, {fxu.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_jac_f(index_t, crvec x, crvec u,
                                            rmat J_fxu) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(J_fxu.rows() == nx);
    assert(J_fxu.cols() == nx + nu);
    impl->jac_f({x.data(), u.data(), param.data()}, {J_fxu.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_grad_f_prod(index_t, crvec x, crvec u,
                                                  crvec p,
                                                  rvec grad_fxu_p) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(p.size() == nx);
    assert(grad_fxu_p.size() == nx + nu);
    impl->grad_f_prod({x.data(), u.data(), param.data(), p.data()},
                      {grad_fxu_p.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_h(index_t, crvec x, crvec u,
                                        rvec h) const {
    assert(x.size() == nx);
    assert(u.size() == nu);
    assert(h.size() == nh);
    impl->h({x.data(), u.data(), param.data()}, {h.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_h_N(crvec x, rvec h) const {
    assert(x.size() == nx);
    assert(h.size() == nh_N);
    impl->h_N({x.data(), param.data()}, {h.data()});
}
template <Config Conf>
auto CasADiControlProblem<Conf>::eval_l(index_t, crvec h) const -> real_t {
    assert(h.size() == nh);
    real_t l;
    impl->l({h.data(), param.data()}, {&l});
    return l;
}
template <Config Conf>
auto CasADiControlProblem<Conf>::eval_l_N(crvec h) const -> real_t {
    assert(h.size() == nh_N);
    real_t l;
    impl->l_N({h.data(), param.data()}, {&l});
    return l;
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_qr(index_t, crvec xu, crvec h,
                                         rvec qr) const {
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(qr.size() == nx + nu);
    impl->qr({xu.data(), h.data(), param.data()}, {qr.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_q_N(crvec x, crvec h, rvec q) const {
    assert(x.size() == nx);
    assert(h.size() == nh_N);
    assert(q.size() == nx);
    impl->q_N({x.data(), h.data(), param.data()}, {q.data()});
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_Q(index_t, crvec xu, crvec h,
                                            rmat Q) const {
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(Q.rows() == nx);
    assert(Q.cols() == nx);
    impl->Q({xu.data(), h.data(), param.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    auto &&sparse = impl->Q.fun.sparsity_out(0);
    if (sparse.is_dense())
        Q += cmmat{work.data(), nx, nx};
    else
        Q += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_Q_N(crvec x, crvec h, rmat Q) const {
    assert(x.size() == nx);
    assert(h.size() == nh_N);
    assert(Q.rows() == nx);
    assert(Q.cols() == nx);
    impl->Q_N({x.data(), h.data(), param.data()}, {work.data()});
    auto &&sparse = impl->Q_N.fun.sparsity_out(0);
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense())
        Q += cmmat{work.data(), nx, nx};
    else
        Q += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}
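Both Q evaluators write the CasADi result into the preallocated work member and then accumulate it into Q, either through a dense map or, when the CasADi output is sparse, through an Eigen::Map over the compressed-column storage that CasADi exposes via colind() and row(). The following standalone sketch reproduces the same mapping pattern with plain Eigen and hand-written CCS arrays (no alpaqa or CasADi types involved):

#include <Eigen/Sparse>
#include <iostream>

int main() {
    // 3×3 matrix with nonzeros at (0,0), (2,1), (1,2), stored in
    // compressed-column form: column pointers, row indices, values.
    int colind[] = {0, 1, 2, 3};
    int row[]    = {0, 2, 1};
    double val[] = {1.0, 2.0, 3.0};
    using spmat   = Eigen::SparseMatrix<double, Eigen::ColMajor, int>;
    using cmspmat = Eigen::Map<const spmat>;
    cmspmat M{3, 3, 3, colind, row, val};

    Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(3, 3);
    Q += M; // same accumulation pattern as eval_add_Q above
    std::cout << Q << "\n";
}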
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_R_masked(index_t, crvec xu, crvec h,
                                                   crindexvec mask, rmat R,
                                                   rvec work) const {
    auto &&sparse = impl->R.fun.sparsity_out(0);
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(R.rows() <= nu);
    assert(R.cols() <= nu);
    assert(R.rows() == mask.size());
    assert(R.cols() == mask.size());
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->R({xu.data(), h.data(), param.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense()) {
        cmmat R_full{work.data(), nu, nu};
        R += R_full(mask, mask);
    } else {
        cmspmat R_full{
            nu,
            nu,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        util::sparse_add_masked(R_full, R, mask);
    }
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_S_masked(index_t, crvec xu, crvec h,
                                                   crindexvec mask, rmat S,
                                                   rvec work) const {
    auto &&sparse = impl->S.fun.sparsity_out(0);
    assert(xu.size() == nx + nu);
    assert(h.size() == nh);
    assert(S.rows() <= nu);
    assert(S.rows() == mask.size());
    assert(S.cols() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->S({xu.data(), h.data(), param.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    using Eigen::indexing::all;
    if (sparse.is_dense()) {
        cmmat S_full{work.data(), nu, nx};
        S += S_full(mask, all);
    } else {
        cmspmat S_full{
            nu,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        util::sparse_add_masked_rows(S_full, S, mask);
    }
}
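The dense branches rely on Eigen 3.4's indexing with integer vectors, R_full(mask, mask) and S_full(mask, all), while the sparse branches delegate the same masked accumulation to the sparse_add_masked helpers. A small self-contained illustration of the dense masked indexing (sizes and values are arbitrary):

#include <Eigen/Dense>
#include <iostream>

int main() {
    Eigen::MatrixXd R_full = Eigen::MatrixXd::Random(4, 4);
    Eigen::VectorX<Eigen::Index> mask(2);
    mask << 0, 2; // keep rows/columns 0 and 2
    Eigen::MatrixXd R = Eigen::MatrixXd::Zero(2, 2);
    R += R_full(mask, mask);                      // R += R_full(mask, mask)
    Eigen::MatrixXd S = R_full(mask, Eigen::all); // select full rows
    std::cout << R << "\n\n" << S << "\n";
}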
template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_R_prod_masked(
    index_t, crvec xu, crvec h, crindexvec mask_J, crindexvec mask_K, crvec v,
    rvec out, rvec work) const {
    auto &&sparse = impl->R.fun.sparsity_out(0);
    assert(v.size() == nu);
    assert(out.size() == mask_J.size());
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense()) {
        auto R = cmmat{work.data(), nu, nu};
        out.noalias() += R(mask_J, mask_K) * v(mask_K);
    } else {
        cmspmat R{
            nu,
            nu,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        // out += R(mask_J,mask_K) * v(mask_K);
        util::sparse_matvec_add_masked_rows_cols(R, v, out, mask_J, mask_K);
    }
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_S_prod_masked(index_t, crvec xu,
                                                        crvec h,
                                                        crindexvec mask_K,
                                                        crvec v, rvec out,
                                                        rvec work) const {
    auto &&sparse = impl->S.fun.sparsity_out(0);
    assert(v.size() == nu);
    assert(out.size() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    using Eigen::indexing::all;
    if (sparse.is_dense()) {
        auto Sᵀ = cmmat{work.data(), nu, nx}.transpose();
        out.noalias() += Sᵀ(all, mask_K) * v(mask_K);
    } else {
        cmspmat S{
            nu,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
        // out += S(mask_K,:)ᵀ * v(mask_K);
        util::sparse_matvec_add_transpose_masked_rows(S, v, out, mask_K);
    }
}

template <Config Conf>
auto CasADiControlProblem<Conf>::get_R_work_size() const -> length_t {
    auto &&sparse = impl->R.fun.sparsity_out(0);
    return static_cast<length_t>(sparse.nnz());
}

template <Config Conf>
auto CasADiControlProblem<Conf>::get_S_work_size() const -> length_t {
    auto &&sparse = impl->S.fun.sparsity_out(0);
    return static_cast<length_t>(sparse.nnz());
}
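These two work sizes tell the caller how many scalars the work vector passed to the masked R and S evaluations must hold, so it can be allocated once and reused across timesteps. A hedged sketch of that calling pattern (the DefaultConfig alias, the header path, and the concrete vector types are assumptions):

#include <alpaqa/casadi/CasADiControlProblem.hpp> // assumed public header path
#include <Eigen/Dense>

using Problem = alpaqa::CasADiControlProblem<alpaqa::DefaultConfig>;

void accumulate_R(const Problem &problem) {
    Eigen::VectorXd xu = Eigen::VectorXd::Zero(problem.nx + problem.nu);
    Eigen::VectorXd h  = Eigen::VectorXd::Zero(problem.nh);
    problem.eval_h(0, xu.head(problem.nx), xu.tail(problem.nu), h);
    // Keep all inputs in the mask for this illustration.
    Eigen::VectorX<Eigen::Index> mask =
        Eigen::VectorX<Eigen::Index>::LinSpaced(problem.nu, 0, problem.nu - 1);
    Eigen::MatrixXd R = Eigen::MatrixXd::Zero(problem.nu, problem.nu);
    Eigen::VectorXd work(problem.get_R_work_size()); // allocated once, reusable per stage
    problem.eval_add_R_masked(0, xu, h, mask, R, work);
}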
template <Config Conf>
void CasADiControlProblem<Conf>::eval_constr(index_t, crvec x, rvec c) const {
    if (nc == 0)
        return;
    assert(x.size() == nx);
    assert(c.size() == nc);
    impl->c({x.data(), param.data()}, {c.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_grad_constr_prod(index_t, crvec x,
                                                       crvec p,
                                                       rvec grad_cx_p) const {
    assert(x.size() == nx);
    assert(p.size() == nc);
    assert(grad_cx_p.size() == nx);
    impl->grad_c_prod({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_gn_hess_constr(index_t, crvec x,
                                                         crvec M,
                                                         rmat out) const {
    auto &&sparse = impl->gn_hess_c.fun.sparsity_out(0);
    assert(x.size() == nx);
    assert(M.size() == nc);
    assert(out.rows() == nx);
    assert(out.cols() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->gn_hess_c({x.data(), param.data(), M.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense())
        out += cmmat{work.data(), nx, nx};
    else
        out += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_constr_N(crvec x, rvec c) const {
    if (nc_N == 0)
        return;
    assert(x.size() == nx);
    assert(c.size() == nc_N);
    impl->c_N({x.data(), param.data()}, {c.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_grad_constr_prod_N(crvec x, crvec p,
                                                         rvec grad_cx_p) const {
    assert(x.size() == nx);
    assert(p.size() == nc_N);
    assert(grad_cx_p.size() == nx);
    impl->grad_c_prod_N({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
}

template <Config Conf>
void CasADiControlProblem<Conf>::eval_add_gn_hess_constr_N(crvec x, crvec M,
                                                           rmat out) const {
    // Use the sparsity pattern of gn_hess_c_N (not gn_hess_c), since that is
    // the function evaluated below.
    auto &&sparse = impl->gn_hess_c_N.fun.sparsity_out(0);
    assert(x.size() == nx);
    assert(M.size() == nc_N);
    assert(out.rows() == nx);
    assert(out.cols() == nx);
    assert(work.size() >= static_cast<length_t>(sparse.nnz()));
    impl->gn_hess_c_N({x.data(), param.data(), M.data()}, {work.data()});
    using spmat   = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
    using cmspmat = Eigen::Map<const spmat>;
    if (sparse.is_dense())
        out += cmmat{work.data(), nx, nx};
    else
        out += cmspmat{
            nx,
            nx,
            static_cast<length_t>(sparse.nnz()),
            sparse.colind(),
            sparse.row(),
            work.data(),
        };
}

} // namespace alpaqa