| field | value |
|---|---|
| author | jeanpouget-abadie <jean.pougetabadie@gmail.com> 2015-02-02 14:08:03 -0500 |
| committer | jeanpouget-abadie <jean.pougetabadie@gmail.com> 2015-02-02 14:08:03 -0500 |
| commit | c883cb7ff6c7536d8dc4ea15dfc733bfbb98693c |
| tree | 11f42659f8c36a8dea905cd64ff4b6454118a173 /src/convex_optimization.py |
| parent | 873ce616a9d21d670f15ab7b5ae61cc143dccc80 |
| download | cascades-c883cb7ff6c7536d8dc4ea15dfc733bfbb98693c.tar.gz |
earlier version
Diffstat (limited to 'src/convex_optimization.py')
| -rw-r--r-- | src/convex_optimization.py | 8 |
1 file changed, 4 insertions(+), 4 deletions(-)
```diff
diff --git a/src/convex_optimization.py b/src/convex_optimization.py
index 3241bdb..6a43705 100644
--- a/src/convex_optimization.py
+++ b/src/convex_optimization.py
@@ -36,10 +36,10 @@ def sparse_recovery(M_val, w_val, lbda):
     w = theano.shared(w_val.astype(theano.config.floatX))
     lbda = theano.shared(lbda.astype(theano.config.floatX))
 
-    y = lbda*1./np.sqrt(m) * (theta_).norm(1) - 1./m*(
-        tensor.dot(1-w, tensor.log(1-tensor.exp(M.dot(theta_ *1./(n*m)))))\
-        + (1 - 1./(n*m)) * tensor.dot(1 - w, tensor.dot(M, theta_)) \
-        + tensor.dot(w, tensor.dot(M, theta_)))
+    y = lbda * theta_.norm(1) - 1./m*(
+        tensor.dot(1-w, tensor.log(1-tensor.exp(M.dot(theta_))))
+        + tensor.dot(w, tensor.dot(M, theta_))
+        )
 
     return diff_and_opt(theta, theta_, M, M_val, w, lbda, y)
```
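In effect, the commit drops the `1./np.sqrt(m)` scaling on the penalty and the extra linearized terms, leaving the plain l1-penalized objective y = λ‖θ‖₁ − (1/m)[(1−w)ᵀ log(1 − exp(Mθ)) + wᵀMθ]. A minimal NumPy sketch of that expression, for reference only (the function name and array shapes here are assumptions, not part of the repository):

```python
import numpy as np

def sparse_recovery_objective(M, w, theta, lbda):
    """NumPy version of the updated Theano objective (illustrative sketch).

    M     : (m, n) measurement matrix
    w     : (m,)   observation vector
    theta : (n,)   parameter vector being recovered
    lbda  : float  l1 regularization weight
    """
    m = M.shape[0]
    Mt = M.dot(theta)
    # log(1 - exp(Mt)) requires Mt < 0, the same domain constraint
    # the Theano expression in the diff implicitly relies on.
    return (lbda * np.abs(theta).sum()
            - 1.0 / m * ((1 - w).dot(np.log(1 - np.exp(Mt)))
                         + w.dot(Mt)))
```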
