From b49e39e300f9d87310f6cce20018427a98f34486 Mon Sep 17 00:00:00 2001
From: jeanpouget-abadie
Date: Sun, 30 Nov 2014 23:17:41 -0500
Subject: convex_optimization first draft

---
 jpa_test/convex_optimization.py | 47 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 jpa_test/convex_optimization.py

(limited to 'jpa_test/convex_optimization.py')

diff --git a/jpa_test/convex_optimization.py b/jpa_test/convex_optimization.py
new file mode 100644
index 0000000..ebd68e3
--- /dev/null
+++ b/jpa_test/convex_optimization.py
@@ -0,0 +1,47 @@
+import theano
+from theano import tensor
+import numpy as np
+import cvxopt
+
+
+
+def l1regls(M_val, w_val):
+    """
+    Solves:
+    min  -sum(theta)
+    s.t. theta <= 0
+         |e^{M*theta} - (1 - w)|_2 <= 1
+    """
+    m, n = M_val.shape
+    c = cvxopt.matrix(-1.0, (n, 1))
+
+    theta = tensor.row().T
+    z = tensor.row().T
+    theta_ = theta.flatten()
+    z_ = z.flatten()
+    M = theano.shared(M_val.astype(theano.config.floatX))
+    w = theano.shared(w_val.astype(theano.config.floatX))
+    y = (tensor.exp(M.dot(theta_)) - (1 - w)).norm(2) - 1
+    y_diff = tensor.grad(y, theta_)
+    y_hess = z_[0] * theano.gradient.hessian(y, theta_)
+    f_x = theano.function([theta], [y, y_diff], allow_input_downcast=True)
+    f_xz = theano.function([theta, z], [y, y_diff, y_hess], allow_input_downcast=True)
+
+    def F(x=None, z=None):
+        if x is None:
+            return 1, cvxopt.matrix(1.0, (n, 1))
+        elif z is None:
+            y, y_diff = f_x(x)
+            return cvxopt.matrix(float(y), (1, 1)), cvxopt.matrix(y_diff.astype("float64")).T
+        else:
+            y, y_diff, y_hess = f_xz(x, z)
+            return cvxopt.matrix(float(y), (1, 1)), \
+                cvxopt.matrix(y_diff.astype("float64")).T, \
+                cvxopt.matrix(y_hess.astype("float64"))
+
+    return cvxopt.solvers.cpl(c, F)['x']
+
+if __name__ == "__main__":
+    M_val = np.random.rand(100, 20)
+    w_val = np.random.rand(100)
+    print(l1regls(M_val, w_val))
--
cgit v1.2.3-70-g09d2
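
For quick testing, a minimal usage sketch (not part of the patch). It assumes the new file is importable as convex_optimization (the module name is taken from the patch path) and that Theano and CVXOPT are installed; it calls l1regls and reports the objective and the nonlinear constraint value from the docstring at the returned point.

    import numpy as np
    from convex_optimization import l1regls  # module name assumed from the patch path

    np.random.seed(0)
    M_val = np.random.rand(100, 20)
    w_val = np.random.rand(100)

    # cvxopt.solvers.cpl returns a CVXOPT matrix; convert to a flat numpy array
    theta = np.array(l1regls(M_val, w_val)).flatten()

    # Objective from the docstring: minimize -sum(theta)
    print("objective -sum(theta):", -theta.sum())

    # Nonlinear constraint from the docstring: |exp(M*theta) - (1 - w)|_2 - 1 <= 0
    residual = np.exp(M_val.dot(theta)) - (1 - w_val)
    print("constraint value:", np.linalg.norm(residual) - 1.0)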
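
The patch relies on tensor.grad and theano.gradient.hessian to supply cvxopt.solvers.cpl with derivatives of the constraint f(theta) = |exp(M*theta) - (1 - w)|_2 - 1. As a self-contained sanity check (pure NumPy, not part of the patch, sizes and names illustrative), the gradient can also be derived by hand as M^T (r * exp(M*theta)) / |r|_2 with r = exp(M*theta) - (1 - w), and compared against central finite differences:

    import numpy as np

    np.random.seed(1)
    M = np.random.rand(5, 3)
    w = np.random.rand(5)
    theta = 0.1 * np.random.randn(3)

    def f(t):
        # constraint value: ||exp(M t) - (1 - w)||_2 - 1
        return np.linalg.norm(np.exp(M.dot(t)) - (1 - w)) - 1.0

    # hand-derived gradient: M^T (r * exp(M theta)) / ||r||_2
    r = np.exp(M.dot(theta)) - (1 - w)
    grad = M.T.dot(r * np.exp(M.dot(theta))) / np.linalg.norm(r)

    # central finite differences along each coordinate direction
    eps = 1e-6
    fd = np.array([(f(theta + eps * e) - f(theta - eps * e)) / (2 * eps)
                   for e in np.eye(3)])

    print("max abs difference:", np.max(np.abs(grad - fd)))  # should be close to zero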