I would like to take a portfolio and constraint it's factor variance to be below a percentage of it's total variance. Below is a fully runnable example, where the solver succeeds but the constraint I have written to limit factor variance does not seem to work. The two other helper constriants do work though. What do you think is the issue?
import numpy as np
import pandas as pd
from mosek.fusion import Model, Domain, Expr, ObjectiveSense
np.random.seed(0)
N, K = 10, 5
# Random factor exposures B (N×K) and factor covariance COV (K×K)
B = np.random.randn(N, K)
A = np.random.randn(K, K)
COV = A.T @ A + np.eye(K) * 0.1
# Random idiosyncratic volatilities (specific risk)
specific_vol = np.random.rand(N) * 0.2 + 0.05
# Random "alpha score" vector
score = pd.Series(np.random.randn(N), index=[f"Ticker{i}" for i in range(N)])
mu = score.to_numpy()
# Build covariance blocks and Cholesky factors
Q_fac = B @ COV @ B.T
Q_idio = np.diag(specific_vol**2)
Q_tot = Q_fac + Q_idio
eps = 1e-8
L_fac = np.linalg.cholesky(Q_fac + eps * np.eye(N))
L_tot = np.linalg.cholesky(Q_tot + eps * np.eye(N))
GMV = 300.0 # GMV budget
fac_var_pct_total = 20.0 # target factor-variance share (%)
alpha = fac_var_pct_total / 100.0 # fraction
M = Model("alpha_with_factor_var_share")
x = M.variable("x", N, Domain.unbounded())
u = M.variable("u", N, Domain.greaterThan(0.0))
# 3a) GMV budget: sum |x| ≤ GMV
M.constraint("abs1", Expr.sub(u, x), Domain.greaterThan(0.0))
M.constraint("abs2", Expr.sub(u, Expr.mul(-1.0, x)), Domain.greaterThan(0.0))
M.constraint("gmv", Expr.sub(Expr.sum(u), GMV), Domain.lessThan(0.0))
t_tot = M.variable("t_tot", 1, Domain.greaterThan(0.0))
M.constraint("tot_cone", Expr.vstack(t_tot, Expr.mul(L_tot, x)), Domain.inQCone())
bound = np.sqrt(alpha)
M.constraint(
"fac_share_cone",
Expr.vstack(
Expr.mul(bound, t_tot),
Expr.mul(L_fac, x)
),
Domain.inQCone()
)
# Helper constraint to prevent total budget going to single position
max_pct = 0.3
max_pos = GMV * max_pct
for i in range(N):
# Min position size
M.constraint(
f"pos_cap_{i}",
Expr.sub(u.index(i), max_pos),
Domain.lessThan(0.0)
)
M.objective("max_alpha", ObjectiveSense.Maximize, Expr.dot(mu, x))
M.solve()
x_opt = np.array(x.level()).ravel()
gmv_val = float(np.sum(np.abs(x_opt)))
fac_var = float(x_opt @ Q_fac @ x_opt)
tot_var = float(x_opt @ Q_tot @ x_opt)
fac_share = fac_var / tot_var
results = pd.DataFrame({
"GMV Budget": [GMV],
"Actual GMV": [gmv_val],
"Factor Var %": [fac_share * 100],
"Target Factor Var %": [fac_var_pct_total],
"Total Variance": [tot_var]
})
results.to_dict()
```
Which prints for me:
```
{'GMV Budget': {0: 300.0},
'Actual GMV': {0: 299.9999996150878},
'Factor Var %': {0: 99.9259003293236},
'Target Factor Var %': {0: 20.0},
'Total Variance': {0: 294004.5697279128}}
```
So, > 99% of variance is from factor variance whereas the constraint was for <= 20%. To me it seems like the cone constraints are set properly, so I'm a bit confused.
Thank you!