| field | value |
|---|---|
| author | jeanpouget-abadie <jean.pougetabadie@gmail.com>, 2015-02-04 18:39:03 -0500 |
| committer | jeanpouget-abadie <jean.pougetabadie@gmail.com>, 2015-02-04 18:39:03 -0500 |
| commit | 393cba417046147286001e7317a36db148545bb1 (patch) |
| tree | 42f3060330b28d1a7a069da9b70ae0a8b214ef40 /src/convex_optimization.py |
| parent | 0e6ef8ce1055b3a524e2432ffda76f1acceed3d3 (diff) |
| download | cascades-393cba417046147286001e7317a36db148545bb1.tar.gz |
routine commit
Diffstat (limited to 'src/convex_optimization.py')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/convex_optimization.py | 6 |

1 file changed, 3 insertions, 3 deletions
```diff
diff --git a/src/convex_optimization.py b/src/convex_optimization.py
index e355bc6..fce89b8 100644
--- a/src/convex_optimization.py
+++ b/src/convex_optimization.py
@@ -80,7 +80,7 @@ def type_lasso(lbda, n_cascades):
 
     return f_x, f_xz
 
-@timeout.timeout(10)
+@timeout.timeout(70)
 def diff_and_opt(M_val, w_val, f_x, f_xz):
 
     if M_val.dtype == bool:
@@ -90,7 +90,7 @@ def diff_and_opt(M_val, w_val, f_x, f_xz):
 
     def F(x=None, z=None):
         if x is None:
-            return 0, cvxopt.matrix(-.001, (n,1))
+            return 0, cvxopt.matrix(-.7, (n,1))
         elif z is None:
             y, y_diff = f_x(x, M_val, w_val)
             return cvxopt.matrix(float(y), (1, 1)),\
@@ -116,7 +116,7 @@ def diff_and_opt(M_val, w_val, f_x, f_xz):
                   " given to the solver")
     except ValueError:
         print("Domain Error, skipping to next node")
-        theta = np.zeros(len(w_val))
+        theta = np.zeros(M_val.shape[1])
 
     if cvxopt.solvers.options['show_progress']:
         print(1 - np.exp(theta))
```
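For context, the `F(x=None, z=None)` closure touched by this commit follows the callback protocol expected by `cvxopt.solvers.cp`: called with no arguments it must return the number of nonlinear constraints and a starting point (the value changed here from `-.001` to `-.7`), called with `x` it returns the objective value and gradient, and called with `x, z` it additionally returns the `z`-weighted Hessian. Below is a minimal, self-contained sketch of that protocol with a stand-in objective, `f(x) = -sum(log(1 - x_i**2))`; the objective, `n`, and the starting point are illustrative assumptions, not the project's cascade likelihood.

```python
# Minimal sketch of the cvxopt.solvers.cp callback protocol used by F() in
# convex_optimization.py. The objective f(x) = -sum(log(1 - x_i^2)) is a
# stand-in chosen for brevity; n and the starting point are illustrative.
from cvxopt import matrix, spdiag, log, div, solvers

n = 5

def F(x=None, z=None):
    if x is None:
        # No arguments: return (number of nonlinear constraints, start point).
        # The commit above tweaks exactly this starting point.
        return 0, matrix(-0.5, (n, 1))
    if max(x**2) >= 1.0:
        # Outside the objective's domain: cp expects None in this case.
        return None
    u = 1 - x**2
    f = -sum(log(u))              # objective value
    Df = div(2 * x, u).T          # gradient, shaped 1 x n
    if z is None:
        return f, Df
    # With z: also return z[0] times the Hessian of the objective.
    H = spdiag(2 * z[0] * div(1 + x**2, u**2))
    return f, Df, H

solvers.options['show_progress'] = False
print(solvers.cp(F)['x'])
```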

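The `@timeout.timeout(...)` decorator comes from a project module that is not part of this diff; it presumably enforces a wall-clock limit in seconds on `diff_and_opt`, which the commit raises from 10 to 70. A common way to implement such a decorator, shown here purely as an assumed sketch (Unix, main thread only), uses `SIGALRM`:

```python
# Assumed sketch of a seconds-based timeout decorator; the project's actual
# timeout module is not shown in this diff and may differ.
import functools
import signal

def timeout(seconds):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            def handler(signum, frame):
                raise TimeoutError(f"{func.__name__} exceeded {seconds}s")
            old_handler = signal.signal(signal.SIGALRM, handler)
            signal.alarm(int(seconds))     # arm the alarm
            try:
                return func(*args, **kwargs)
            finally:
                signal.alarm(0)            # disarm
                signal.signal(signal.SIGALRM, old_handler)
        return wrapper
    return decorator
```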