alpaqa sparse
Nonconvex constrained optimization
CasADiControlProblem.tpp
1#pragma once
2
10
11#include <Eigen/Sparse>
12#include <casadi/core/external.hpp>
13
14#include <memory>
15#include <optional>
16#include <stdexcept>
17#include <type_traits>
18
19namespace alpaqa {
20
21namespace fs = std::filesystem;
22
23namespace casadi_loader {
24
25using namespace alpaqa::casadi_loader;
26
27template <Config Conf>
28struct CasADiControlFunctionsWithParam {
29    USING_ALPAQA_CONFIG(Conf);
30
31    static constexpr bool WithParam = true;
32
33    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> f;
34    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> jac_f;
35    CasADiFunctionEvaluator<Conf, 3 + WithParam, 1> grad_f_prod;
36    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> h;
37    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> h_N;
38    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> l;
39    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> l_N;
40    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> qr;
41    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> q_N;
42    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> Q;
43    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> Q_N;
44    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> R;
45    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> S;
46    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> c;
47    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> grad_c_prod;
48    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> gn_hess_c;
49    CasADiFunctionEvaluator<Conf, 1 + WithParam, 1> c_N;
50    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> grad_c_prod_N;
51    CasADiFunctionEvaluator<Conf, 2 + WithParam, 1> gn_hess_c_N;
52};
53
54
55
56} // namespace casadi_loader
57
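// The constructor loads the CasADi functions "f", "h", "h_N", "c" and "c_N"
// from the given shared library, deduces the problem dimensions from their
// signatures, and then loads the remaining cost and derivative functions with
// those dimensions.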
58template <Config Conf>
59CasADiControlProblem<Conf>::CasADiControlProblem(const std::string &so_name,
60 length_t N)
61 : N{N} {
62 length_t p;
63    using namespace casadi_loader;
64    auto load_f = [&]() -> CasADiFunctionEvaluator<Conf, 3, 1> {
65        casadi::Function ffun = casadi::external("f", so_name);
66 using namespace std::literals::string_literals;
67 if (ffun.n_in() != 3)
68 throw std::invalid_argument(
69 "Invalid number of input arguments: got "s +
70 std::to_string(ffun.n_in()) + ", should be 3.");
71 if (ffun.n_out() != 1)
72 throw std::invalid_argument(
73 "Invalid number of output arguments: got "s +
74                std::to_string(ffun.n_out()) + ", should be 1.");
75 nx = static_cast<length_t>(ffun.size1_in(0));
76 nu = static_cast<length_t>(ffun.size1_in(1));
77        p = static_cast<length_t>(ffun.size1_in(2));
78        CasADiFunctionEvaluator<Conf, 3, 1> f{std::move(ffun)};
79 f.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
80 {dim(nx, 1)});
81 return f;
82 };
83    auto load_h = [&]() -> CasADiFunctionEvaluator<Conf, 3, 1> {
84        casadi::Function hfun = casadi::external("h", so_name);
85 using namespace std::literals::string_literals;
86 if (hfun.n_in() != 3)
87 throw std::invalid_argument(
88 "Invalid number of input arguments: got "s +
89 std::to_string(hfun.n_in()) + ", should be 3.");
90 if (hfun.n_out() != 1)
91 throw std::invalid_argument(
92 "Invalid number of output arguments: got "s +
93                std::to_string(hfun.n_out()) + ", should be 1.");
94        nh = static_cast<length_t>(hfun.size1_out(0));
95        CasADiFunctionEvaluator<Conf, 3, 1> h{std::move(hfun)};
96 h.validate_dimensions({dim(nx, 1), dim(nu, 1), dim(p, 1)},
97 {dim(nh, 1)});
98 return h;
99 };
100    auto load_h_N = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
101        casadi::Function hfun = casadi::external("h_N", so_name);
102 using namespace std::literals::string_literals;
103 if (hfun.n_in() != 2)
104 throw std::invalid_argument(
105 "Invalid number of input arguments: got "s +
106 std::to_string(hfun.n_in()) + ", should be 2.");
107 if (hfun.n_out() != 1)
108 throw std::invalid_argument(
109 "Invalid number of output arguments: got "s +
110                std::to_string(hfun.n_out()) + ", should be 1.");
111        nh_N = static_cast<length_t>(hfun.size1_out(0));
112        CasADiFunctionEvaluator<Conf, 2, 1> h{std::move(hfun)};
113 h.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nh_N, 1)});
114 return h;
115 };
116    auto load_c = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
117        casadi::Function cfun = casadi::external("c", so_name);
118 using namespace std::literals::string_literals;
119 if (cfun.n_in() != 2)
120 throw std::invalid_argument(
121 "Invalid number of input arguments: got "s +
122 std::to_string(cfun.n_in()) + ", should be 2.");
123 if (cfun.n_out() != 1)
124 throw std::invalid_argument(
125 "Invalid number of output arguments: got "s +
126                std::to_string(cfun.n_out()) + ", should be 1.");
127        nc = static_cast<length_t>(cfun.size1_out(0));
128        CasADiFunctionEvaluator<Conf, 2, 1> c{std::move(cfun)};
129 c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc, 1)});
130 return c;
131 };
132    auto load_c_N = [&]() -> CasADiFunctionEvaluator<Conf, 2, 1> {
133        casadi::Function cfun = casadi::external("c_N", so_name);
134 using namespace std::literals::string_literals;
135 if (cfun.n_in() != 2)
136 throw std::invalid_argument(
137 "Invalid number of input arguments: got "s +
138 std::to_string(cfun.n_in()) + ", should be 2.");
139 if (cfun.n_out() != 1)
140 throw std::invalid_argument(
141 "Invalid number of output arguments: got "s +
142                std::to_string(cfun.n_out()) + ", should be 1.");
143        nc_N = static_cast<length_t>(cfun.size1_out(0));
144        CasADiFunctionEvaluator<Conf, 2, 1> c{std::move(cfun)};
145 c.validate_dimensions({dim(nx, 1), dim(p, 1)}, {dim(nc_N, 1)});
146 return c;
147 };
148 // Load the functions "f", "h", and "c" to determine the unknown dimensions.
149 auto f = wrap_load(so_name, "f", load_f);
150 auto h = wrap_load(so_name, "h", load_h);
151 auto h_N = wrap_load(so_name, "h_N", load_h_N);
152 auto c = wrap_load(so_name, "c", load_c);
153 auto c_N = wrap_load(so_name, "c_N", load_c_N);
154
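    // Initial state and parameters are filled with NaN so that uninitialized
    // data is easy to spot; the input and constraint boxes are allocated with
    // the dimensions deduced above.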
155 this->x_init = vec::Constant(nx, alpaqa::NaN<Conf>);
156 this->param = vec::Constant(p, alpaqa::NaN<Conf>);
157 this->U = Box{nu};
158 this->D = Box{nc};
159 this->D_N = Box{nc_N};
160
161    impl = std::make_unique<CasADiControlFunctionsWithParam<Conf>>(
162        CasADiControlFunctionsWithParam<Conf>{
163            .f = std::move(f),
164            .jac_f = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
165 so_name, "jacobian_f", dims(nx, nu, p), dims(dim(nx, nx + nu))),
166 .grad_f_prod = wrapped_load<CasADiFunctionEvaluator<Conf, 4, 1>>(
167 so_name, "grad_f_prod", dims(nx, nu, p, nx), dims(nx + nu)),
168 .h = std::move(h),
169 .h_N = std::move(h_N),
170            .l = wrapped_load<CasADiFunctionEvaluator<Conf, 2, 1>>(
171                so_name, "l", dims(nh, p), dims(1)),
172 .l_N = wrapped_load<CasADiFunctionEvaluator<Conf, 2, 1>>(
173 so_name, "l_N", dims(nh_N, p), dims(1)),
174 .qr = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
175 so_name, "qr", dims(nx + nu, nh, p), dims(nx + nu)),
176 .q_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
177 so_name, "q_N", dims(nx, nh_N, p), dims(nx)),
178 .Q = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
179 so_name, "Q", dims(nx + nu, nh, p), dims(dim{nx, nx})),
180            .Q_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
181                so_name, "Q_N", dims(nx, nh_N, p), dims(dim{nx, nx})),
182            .R = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
183                so_name, "R", dims(nx + nu, nh, p), dims(dim{nu, nu})),
184            .S = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
185                so_name, "S", dims(nx + nu, nh, p), dims(dim{nu, nx})),
186 .c = std::move(c),
187            .grad_c_prod = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
188                so_name, "grad_c_prod", dims(nx, p, nc), dims(nx)),
189 .gn_hess_c = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
190 so_name, "gn_hess_c", dims(nx, p, nc), dims(dim{nx, nx})),
191 .c_N = std::move(c_N),
192            .grad_c_prod_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
193                so_name, "grad_c_prod_N", dims(nx, p, nc_N), dims(nx)),
194 .gn_hess_c_N = wrapped_load<CasADiFunctionEvaluator<Conf, 3, 1>>(
195 so_name, "gn_hess_c_N", dims(nx, p, nc_N), dims(dim{nx, nx})),
196 });
197
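    // A single work vector is shared by all Hessian evaluations, so size it
    // for the largest sparse output among Q, Q_N, gn_hess_c and gn_hess_c_N.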
198 auto n_work = std::max({
199 impl->Q.fun.sparsity_out(0).nnz(),
200 impl->Q_N.fun.sparsity_out(0).nnz(),
201 impl->gn_hess_c.fun.sparsity_out(0).nnz(),
202 impl->gn_hess_c_N.fun.sparsity_out(0).nnz(),
203 });
204 this->work = vec::Constant(static_cast<length_t>(n_work), NaN<Conf>);
205
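    // If a CSV file with the same name as the shared library exists next to
    // it, load the numerical data (bounds and parameters) from it.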
206 auto bounds_filepath = fs::path{so_name}.replace_extension("csv");
207    if (fs::exists(bounds_filepath))
208        load_numerical_data(bounds_filepath);
209}
210
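// Reads one CSV row per quantity, in this order: U.lowerbound, U.upperbound,
// D.lowerbound, D.upperbound, D_N.lowerbound, D_N.upperbound, x_init, param.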
211template <Config Conf>
212void CasADiControlProblem<Conf>::load_numerical_data(
213 const std::filesystem::path &filepath, char sep) {
214 std::ifstream data_file{filepath};
215 if (!data_file)
216 throw std::runtime_error("Unable to open bounds file \"" +
217 filepath.string() + '"');
218 index_t line = 0;
219 auto wrap_bounds_load = [&](std::string_view name, auto &v) {
220 try {
221            ++line;
222            csv::read_row(data_file, v, sep);
223 } catch (csv::read_error &e) {
224 throw std::runtime_error("Unable to read " + std::string(name) +
225 " from bounds file \"" +
226 filepath.string() + ':' +
227 std::to_string(line) + "\": " + e.what());
228 }
229 };
230 wrap_bounds_load("U.lowerbound", this->U.lowerbound);
231 wrap_bounds_load("U.upperbound", this->U.upperbound);
232 wrap_bounds_load("D.lowerbound", this->D.lowerbound);
233 wrap_bounds_load("D.upperbound", this->D.upperbound);
234 wrap_bounds_load("D_N.lowerbound", this->D_N.lowerbound);
235 wrap_bounds_load("D_N.upperbound", this->D_N.upperbound);
236 wrap_bounds_load("x_init", this->x_init);
237 wrap_bounds_load("param", this->param);
238}
239
240template <Config Conf>
242 default;
243template <Config Conf>
246
247template <Config Conf>
253
256
257template <Config Conf>
258void CasADiControlProblem<Conf>::eval_f(index_t timestep, crvec x, crvec u,
259 rvec fxu) const {
260 assert(x.size() == nx);
261 assert(u.size() == nu);
262 assert(fxu.size() == nx);
263 impl->f({x.data(), u.data(), param.data()}, {fxu.data()});
264}
265template <Config Conf>
266void CasADiControlProblem<Conf>::eval_jac_f(index_t timestep, crvec x, crvec u,
267 rmat J_fxu) const {
268 assert(x.size() == nx);
269 assert(u.size() == nu);
270 assert(J_fxu.rows() == nx);
271 assert(J_fxu.cols() == nx + nu);
272 impl->jac_f({x.data(), u.data(), param.data()}, {J_fxu.data()});
273}
274template <Config Conf>
275void CasADiControlProblem<Conf>::eval_grad_f_prod(index_t timestep, crvec x, crvec u,
276 crvec p,
277 rvec grad_fxu_p) const {
278 assert(x.size() == nx);
279 assert(u.size() == nu);
280 assert(p.size() == nx);
281 assert(grad_fxu_p.size() == nx + nu);
282 impl->grad_f_prod({x.data(), u.data(), param.data(), p.data()},
283 {grad_fxu_p.data()});
284}
285template <Config Conf>
286void CasADiControlProblem<Conf>::eval_h(index_t timestep, crvec x, crvec u,
287 rvec h) const {
288 assert(x.size() == nx);
289 assert(u.size() == nu);
290 assert(h.size() == nh);
291 impl->h({x.data(), u.data(), param.data()}, {h.data()});
292}
293template <Config Conf>
294void CasADiControlProblem<Conf>::eval_h_N(crvec x, rvec h) const {
295 assert(x.size() == nx);
296 assert(h.size() == nh_N);
297 impl->h_N({x.data(), param.data()}, {h.data()});
298}
299template <Config Conf>
300auto CasADiControlProblem<Conf>::eval_l(index_t timestep, crvec h) const -> real_t {
301 assert(h.size() == nh);
302 real_t l;
303 impl->l({h.data(), param.data()}, {&l});
304 return l;
305}
306template <Config Conf>
307auto CasADiControlProblem<Conf>::eval_l_N(crvec h) const -> real_t {
308 assert(h.size() == nh_N);
309 real_t l;
310 impl->l_N({h.data(), param.data()}, {&l});
311 return l;
312}
313template <Config Conf>
314void CasADiControlProblem<Conf>::eval_qr(index_t timestep, crvec xu, crvec h,
315 rvec qr) const {
316 assert(xu.size() == nx + nu);
317 assert(h.size() == nh);
318 assert(qr.size() == nx + nu);
319 impl->qr({xu.data(), h.data(), param.data()}, {qr.data()});
320}
321template <Config Conf>
322void CasADiControlProblem<Conf>::eval_q_N(crvec x, crvec h, rvec q) const {
323 assert(x.size() == nx);
324 assert(h.size() == nh_N);
325 assert(q.size() == nx);
326 impl->q_N({x.data(), h.data(), param.data()}, {q.data()});
327}
328template <Config Conf>
329void CasADiControlProblem<Conf>::eval_add_Q(index_t timestep, crvec xu, crvec h,
330 rmat Q) const {
331 assert(xu.size() == nx + nu);
332 assert(h.size() == nh);
333 assert(Q.rows() == nx);
334 assert(Q.cols() == nx);
335 impl->Q({xu.data(), h.data(), param.data()}, {work.data()});
336 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
337 using cmspmat = Eigen::Map<const spmat>;
338 auto &&sparse = impl->Q.fun.sparsity_out(0);
339 if (sparse.is_dense())
340 Q += cmmat{work.data(), nx, nx};
341 else
342 Q += cmspmat{
343 nx,
344 nx,
345 static_cast<length_t>(sparse.nnz()),
346 sparse.colind(),
347 sparse.row(),
348 work.data(),
349 };
350}
351template <Config Conf>
352void CasADiControlProblem<Conf>::eval_add_Q_N(crvec x, crvec h, rmat Q) const {
353 assert(x.size() == nx);
354 assert(h.size() == nh_N);
355 assert(Q.rows() == nx);
356 assert(Q.cols() == nx);
357 impl->Q_N({x.data(), h.data(), param.data()}, {work.data()});
358 auto &&sparse = impl->Q_N.fun.sparsity_out(0);
359 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
360 using cmspmat = Eigen::Map<const spmat>;
361 if (sparse.is_dense())
362 Q += cmmat{work.data(), nx, nx};
363 else
364 Q += cmspmat{
365 nx,
366 nx,
367 static_cast<length_t>(sparse.nnz()),
368 sparse.colind(),
369 sparse.row(),
370 work.data(),
371 };
372}
373
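// The masked variants below first evaluate the CasADi output into the work
// buffer, then accumulate only the rows/columns selected by the mask(s),
// using either a dense map or a compressed-sparse-column map depending on the
// sparsity pattern reported by CasADi.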
374template <Config Conf>
375void CasADiControlProblem<Conf>::eval_add_R_masked(index_t timestep, crvec xu,
376                                                   crvec h, crindexvec mask, rmat R,
377 rvec work) const {
378 auto &&sparse = impl->R.fun.sparsity_out(0);
379 assert(xu.size() == nx + nu);
380 assert(h.size() == nh);
381 assert(R.rows() <= nu);
382 assert(R.cols() <= nu);
383 assert(R.rows() == mask.size());
384 assert(R.cols() == mask.size());
385 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
386 impl->R({xu.data(), h.data(), param.data()}, {work.data()});
387 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
388 using cmspmat = Eigen::Map<const spmat>;
389 if (sparse.is_dense()) {
390 cmmat R_full{work.data(), nu, nu};
391 R += R_full(mask, mask);
392    } else {
393        cmspmat R_full{
394 nu,
395 nu,
396 static_cast<length_t>(sparse.nnz()),
397 sparse.colind(),
398 sparse.row(),
399 work.data(),
400        };
401        util::sparse_add_masked(R_full, R, mask);
402 }
403}
404
405template <Config Conf>
406void CasADiControlProblem<Conf>::eval_add_S_masked(index_t timestep, crvec xu,
407                                                   crvec h, crindexvec mask, rmat S,
408 rvec work) const {
409 auto &&sparse = impl->S.fun.sparsity_out(0);
410 assert(xu.size() == nx + nu);
411 assert(h.size() == nh);
412 assert(S.rows() <= nu);
413 assert(S.rows() == mask.size());
414 assert(S.cols() == nx);
415 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
416 impl->S({xu.data(), h.data(), param.data()}, {work.data()});
417 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
418 using cmspmat = Eigen::Map<const spmat>;
419 using Eigen::indexing::all;
420 if (sparse.is_dense()) {
421 cmmat S_full{work.data(), nu, nx};
422 S += S_full(mask, all);
423    } else {
424        cmspmat S_full{
425 nu,
426 nx,
427 static_cast<length_t>(sparse.nnz()),
428 sparse.colind(),
429 sparse.row(),
430 work.data(),
431        };
432        util::sparse_add_masked_rows(S_full, S, mask);
433 }
434}
435
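// Masked matrix-vector products: out += R(mask_J, mask_K) * v(mask_K) and
// out += S(mask_K, :)ᵀ * v(mask_K), again with separate dense and sparse
// code paths.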
436template <Config Conf>
437void CasADiControlProblem<Conf>::eval_add_R_prod_masked(index_t timestep, crvec xu,
438                                                        crvec h, crindexvec mask_J,
439                                                        crindexvec mask_K,
440 crvec v, rvec out,
441 rvec work) const {
442 auto &&sparse = impl->R.fun.sparsity_out(0);
443 assert(v.size() == nu);
444 assert(out.size() == mask_J.size());
445 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
446 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
447 using cmspmat = Eigen::Map<const spmat>;
448 if (sparse.is_dense()) {
449 auto R = cmmat{work.data(), nu, nu};
450 out.noalias() += R(mask_J, mask_K) * v(mask_K);
451 } else {
452 cmspmat R{
453 nu,
454 nu,
455 static_cast<length_t>(sparse.nnz()),
456 sparse.colind(),
457 sparse.row(),
458 work.data(),
459 };
460        // out += R_full(mask_J,mask_K) * v(mask_K);
461        util::sparse_matvec_add_masked_rows_cols(R, v, out, mask_J, mask_K);
462 }
463}
464
465template <Config Conf>
466void CasADiControlProblem<Conf>::eval_add_S_prod_masked(index_t timestep, crvec xu,
467                                                        crvec h, crindexvec mask_K,
468 crvec v, rvec out,
469 rvec work) const {
470 auto &&sparse = impl->S.fun.sparsity_out(0);
471 assert(v.size() == nu);
472 assert(out.size() == nx);
473 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
474 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
475 using cmspmat = Eigen::Map<const spmat>;
476 using Eigen::indexing::all;
477 if (sparse.is_dense()) {
478 auto Sᵀ = cmmat{work.data(), nu, nx}.transpose();
479 out.noalias() += Sᵀ(all, mask_K) * v(mask_K);
480 } else {
481 cmspmat S{
482 nu,
483 nx,
484 static_cast<length_t>(sparse.nnz()),
485 sparse.colind(),
486 sparse.row(),
487 work.data(),
488 };
489        // out += S(mask_K,:)ᵀ * v(mask_K);
490        util::sparse_matvec_add_transpose_masked_rows(S, v, out, mask_K);
491 }
492}
493
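// The two getters below return the number of structural nonzeros of R and S,
// i.e. the minimum size of the work vector required by the masked evaluations
// above.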
494template <Config Conf>
496 auto &&sparse = impl->R.fun.sparsity_out(0);
497 return static_cast<length_t>(sparse.nnz());
498}
499
500template <Config Conf>
502 auto &&sparse = impl->S.fun.sparsity_out(0);
503 return static_cast<length_t>(sparse.nnz());
504}
505
506template <Config Conf>
507void CasADiControlProblem<Conf>::eval_constr(index_t timestep, crvec x, rvec c) const {
508 if (nc == 0)
509 return;
510 assert(x.size() == nx);
511 assert(c.size() == nc);
512 impl->c({x.data(), param.data()}, {c.data()});
513}
514
515template <Config Conf>
516void CasADiControlProblem<Conf>::eval_grad_constr_prod(index_t timestep, crvec x,
517 crvec p,
518 rvec grad_cx_p) const {
519 assert(x.size() == nx);
520 assert(p.size() == nc);
521 assert(grad_cx_p.size() == nx);
522 impl->grad_c_prod({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
523}
524
525template <Config Conf>
526void CasADiControlProblem<Conf>::eval_add_gn_hess_constr(index_t timestep, crvec x,
527 crvec M,
528 rmat out) const {
529 auto &&sparse = impl->gn_hess_c.fun.sparsity_out(0);
530 assert(x.size() == nx);
531 assert(M.size() == nc);
532 assert(out.rows() == nx);
533 assert(out.cols() == nx);
534 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
535 impl->gn_hess_c({x.data(), param.data(), M.data()}, {work.data()});
536 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
537 using cmspmat = Eigen::Map<const spmat>;
538 if (sparse.is_dense())
539 out += cmmat{work.data(), nx, nx};
540 else
541 out += cmspmat{
542 nx,
543 nx,
544 static_cast<length_t>(sparse.nnz()),
545 sparse.colind(),
546 sparse.row(),
547 work.data(),
548 };
549}
550
551template <Config Conf>
552void CasADiControlProblem<Conf>::eval_constr_N(crvec x, rvec c) const {
553 if (nc_N == 0)
554 return;
555 assert(x.size() == nx);
556 assert(c.size() == nc_N);
557 impl->c_N({x.data(), param.data()}, {c.data()});
558}
559
560template <Config Conf>
561void CasADiControlProblem<Conf>::eval_grad_constr_prod_N(crvec x, crvec p,
562 rvec grad_cx_p) const {
563 assert(x.size() == nx);
564 assert(p.size() == nc_N);
565 assert(grad_cx_p.size() == nx);
566 impl->grad_c_prod_N({x.data(), param.data(), p.data()}, {grad_cx_p.data()});
567}
568
569template <Config Conf>
570void CasADiControlProblem<Conf>::eval_add_gn_hess_constr_N(crvec x, crvec M,
571 rmat out) const {
572    auto &&sparse = impl->gn_hess_c_N.fun.sparsity_out(0);
573 assert(x.size() == nx);
574 assert(M.size() == nc_N);
575 assert(out.rows() == nx);
576 assert(out.cols() == nx);
577 assert(work.size() >= static_cast<length_t>(sparse.nnz()));
578 impl->gn_hess_c_N({x.data(), param.data(), M.data()}, {work.data()});
579 using spmat = Eigen::SparseMatrix<real_t, Eigen::ColMajor, casadi_int>;
580 using cmspmat = Eigen::Map<const spmat>;
581 if (sparse.is_dense())
582 out += cmmat{work.data(), nx, nx};
583 else
584 out += cmspmat{
585 nx,
586 nx,
587 static_cast<length_t>(sparse.nnz()),
588 sparse.colind(),
589 sparse.row(),
590 work.data(),
591 };
592}
593
594} // namespace alpaqa
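A brief usage sketch (not part of the original file): it shows how this problem type is typically constructed and evaluated. The include path, the alpaqa::DefaultConfig alias, and the file names "ocp.so" and "ocp.csv" are assumptions for illustration only.

#include <alpaqa/casadi/CasADiControlProblem.hpp> // assumed include path

int main() {
    using Conf    = alpaqa::DefaultConfig; // assumed double-precision config alias
    using Problem = alpaqa::CasADiControlProblem<Conf>;
    using vec     = Conf::vec;

    // Load an optimal control problem with horizon N = 30 from a compiled
    // CasADi shared library (file names are placeholders).
    Problem problem{"ocp.so", 30};
    // Load bounds, initial state and parameters from an accompanying CSV file.
    problem.load_numerical_data("ocp.csv");

    // Evaluate the stage output h(x, u, p) at the origin for time step 0.
    vec x = vec::Zero(problem.nx);
    vec u = vec::Zero(problem.nu);
    vec h(problem.nh);
    problem.eval_h(0, x, u, h);
}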