Lasso
In this example, we use the PANOC solver to solve a lasso problem: least-squares regression with an \(\ell_1\)-regularization term that promotes sparsity of the solution.
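Concretely, given a data matrix \(A\), measurements \(b\), and a regularization weight \(\lambda\), the problem being solved is
\[
\operatorname*{minimize}_{x} \;\; \tfrac{1}{2} \lVert A x - b \rVert_2^2 + \lambda \lVert x \rVert_1 .
\]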
# %% alpaqa lasso example

import alpaqa as pa
import casadi as cs
import numpy as np
from pprint import pprint

scale = 50
n, m = scale, scale * 2
sparsity = 0.2
rng = np.random.default_rng(0)

# %% Build the problem (CasADi code, independent of alpaqa)

# Quadratic loss plus l1-regularization
# minimize ½‖Ax - b‖² + λ‖x‖₁

A = rng.uniform(-1, 1, (m, n))
x_exact = rng.uniform(0, 1, n)
x_exact[rng.uniform(0, 1, n) > sparsity] = 0
b = A @ x_exact + rng.normal(0, 0.1, m)
λ = 0.025 * m

# Symbolic decision variable
x = cs.MX.sym("x", n)
# Objective function is half the squared norm of Ax - b
f = 0.5 * cs.sumsqr(A @ x - b)

# %% Generate and compile C code for the objective using alpaqa

# Compile and load the problem
problem = (
    pa.minimize(f, x)
    .with_l1_regularizer(λ)
).compile(sym=cs.MX.sym)
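# Note: the ℓ1 term added by with_l1_regularizer is the nonsmooth part of the
# objective; PANOC handles it through its proximal operator (soft thresholding)
# rather than through gradients.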

# %% Solve the problem using alpaqa's PANOC solver

direction = pa.LBFGSDirection({"memory": scale})
solver = pa.PANOCSolver({"print_interval": 10}, direction)
# Add evaluation counters to the problem
cnt = pa.problem_with_counters(problem)
# Solve
sol, stats = solver(cnt.problem, {"tolerance": 1e-10})

# %% Print the results

final_f = problem.eval_f(sol)
print()
pprint(stats)
print()
print("Evaluations:")
print(cnt.evaluations)
print(f"Cost: {final_f + stats['final_h']}")
print(f"Loss: {final_f}")
print(f"Regularizer: {stats['final_h']}")
print(f"FP Residual: {stats['ε']}")
print(f"Run time: {stats['elapsed_time']}")
print(stats["status"])

# %% Plot the results

import matplotlib.pyplot as plt

plt.figure(figsize=(8, 5))
plt.plot(x_exact, ".-", label="True solution")
plt.plot(sol, ".-", label="Estimated solution")
plt.legend()
plt.title("PANOC lasso example")
plt.tight_layout()
plt.show()
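Since x_exact is available, a quick check of how well the \(\ell_1\) penalty recovers the sparsity pattern can be appended to the script. The snippet below is a minimal sketch using only NumPy; the 1e-6 threshold for treating an entry as zero is an arbitrary choice.

# %% Compare the support of the estimate with the true sparsity pattern
# (entries below 1e-6 in absolute value are treated as zero)
support_true = np.abs(x_exact) > 1e-6
support_est = np.abs(sol) > 1e-6
print(f"Nonzeros (true / estimated): {support_true.sum()} / {support_est.sum()}")
print(f"Support mismatches: {np.sum(support_true != support_est)}")
print(f"Relative error: {np.linalg.norm(sol - x_exact) / np.linalg.norm(x_exact)}")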