alpaqa 0.0.1
Nonconvex constrained optimization
codegen-rosenbrock.py
from casadi import SX, Function, CodeGenerator, vertcat, jtimes, gradient
from sys import argv

if len(argv) < 2:
    print(f"Usage: {argv[0]} <name>")
    exit(0)

# Decision variables
x = SX.sym("x")
y = SX.sym("y")
z = SX.sym("z")
unknwns = vertcat(x, y, z)

# Vector multiplying the constraint Jacobian in grad_g
w = SX.sym("w")

# Formulate the NLP: objective and constraint function
f = x**2 + 100*z**2
g = z + (1-x)**2 - y

# Generate C code for the objective, its gradient, the constraint function,
# and the constraint Jacobian-transpose product grad_g(x, w) = J_g(x)^T w
cg = CodeGenerator(f"{argv[1]}.c")
cg.add(Function("f", [unknwns],
                [f],
                ["x"], ["f"]))
cg.add(Function("grad_f", [unknwns],
                [gradient(f, unknwns)],
                ["x"], ["grad_f"]))
cg.add(Function("g", [unknwns],
                [g],
                ["x"], ["g"]))
cg.add(Function("grad_g", [unknwns, w],
                [jtimes(g, unknwns, w, True)],
                ["x", "w"], ["grad_g"]))
cg.generate()
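
The script generates C code for the constrained Rosenbrock example: the objective f(x, y, z) = x^2 + 100 z^2, its gradient, the scalar constraint g(x, y, z) = z + (1 - x)^2 - y, and the Jacobian-transpose product J_g(x)^T w (computed by reverse-mode jtimes). The sketch below is not part of the example; it assumes the script was invoked as python3 codegen-rosenbrock.py rosenbrock (producing rosenbrock.c) and that a C compiler named cc is on the PATH. It compiles the generated file into a shared library and loads the four functions back with casadi.external to check that they evaluate correctly at a test point.

# Minimal sketch: compile the generated C code and sanity-check the functions.
# File names and the compiler command are assumptions about the local setup.
import subprocess
from casadi import external

subprocess.run(["cc", "-shared", "-fPIC", "-O2",
                "rosenbrock.c", "-o", "librosenbrock.so"], check=True)

f      = external("f",      "./librosenbrock.so")  # objective value
grad_f = external("grad_f", "./librosenbrock.so")  # objective gradient
g      = external("g",      "./librosenbrock.so")  # constraint value
grad_g = external("grad_g", "./librosenbrock.so")  # J_g(x)^T w

x0 = [1.5, 0.5, 0.25]                              # arbitrary test point
print("f      =", f(x0))
print("grad_f =", grad_f(x0))
print("g      =", g(x0))
print("grad_g =", grad_g(x0, 1.0))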