Lasso
In this example, we use the PANOC solver to solve a lasso problem, i.e. least squares with \(\ell_1\)-regularization to promote sparsity.
# %% alpaqa lasso example

from pprint import pprint

import casadi as cs
import numpy as np

import alpaqa as pa

# Problem size: n unknowns, m = 2n measurements (overdetermined system).
scale = 50
n, m = scale, scale * 2
sparsity = 0.2  # fraction of entries of the true solution that are nonzero
rng = np.random.default_rng(0)  # fixed seed for a reproducible example
# %% Build the problem (CasADi code, independent of alpaqa)

# Quadratic loss plus l1-regularization
# minimize ½‖Ax - b‖² + λ‖x‖₁

A = rng.uniform(-1, 1, (m, n))
x_exact = rng.uniform(0, 1, n)
# Zero out ~(1 - sparsity) of the entries so the true solution is sparse.
x_exact[rng.uniform(0, 1, n) > sparsity] = 0
b = A @ x_exact + rng.normal(0, 0.1, m)  # noisy measurements of x_exact
λ = 0.025 * m  # l1-regularization weight, scaled with the number of measurements

# Symbolic solution
x = cs.MX.sym("x", n)
# Objective function is squared norm of Ax - b (the smooth part of the cost)
f = 0.5 * cs.sumsqr(A @ x - b)
# %% Generate and compile C code for the objective and constraints using alpaqa

# Compile and load the problem. Only the smooth loss f is compiled; the
# nonsmooth l1 term is attached separately so the solver can handle it with
# its proximal operator instead of differentiating it.
problem = (
    pa.minimize(f, x)
    .with_l1_regularizer(λ)
).compile(sym=cs.MX.sym)
# %% Solve the problem using alpaqa's PANOC solver

# L-BFGS acceleration with a memory proportional to the problem size.
direction = pa.LBFGSDirection({"memory": scale})
solver = pa.PANOCSolver({"print_interval": 10}, direction)
# Add evaluation counters to the problem
cnt = pa.problem_with_counters(problem)
# Solve
sol, stats = solver(cnt.problem, {"tolerance": 1e-10})
# %% Print the results

# Smooth part of the cost at the solution; the nonsmooth l1 part is reported
# by the solver in stats["final_h"].
final_f = problem.eval_objective(sol)
print()
pprint(stats)
print()
print("Evaluations:")
print(cnt.evaluations)
print(f"Cost: {final_f + stats['final_h']}")
print(f"Loss: {final_f}")
print(f"Regularizer: {stats['final_h']}")
print(f"FP Residual: {stats['ε']}")
print(f"Run time: {stats['elapsed_time']}")
print(stats["status"])
# %% Plot the results

import matplotlib.pyplot as plt

# Compare the sparse ground truth against the recovered solution.
plt.figure(figsize=(8, 5))
plt.plot(x_exact, ".-", label="True solution")
plt.plot(sol, ".-", label="Estimated solution")
plt.legend()
plt.title("PANOC lasso example")
plt.tight_layout()
plt.show()