aboutsummaryrefslogtreecommitdiffstats
path: root/jpa_test/convex_optimization.py
diff options
context:
space:
mode:
Diffstat (limited to 'jpa_test/convex_optimization.py')
-rw-r--r--jpa_test/convex_optimization.py135
1 file changed, 0 insertions, 135 deletions
diff --git a/jpa_test/convex_optimization.py b/jpa_test/convex_optimization.py
deleted file mode 100644
index 530e3f5..0000000
--- a/jpa_test/convex_optimization.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import theano
-import cascade_creation
-from theano import tensor, function
-import numpy as np
-import timeout
-import cvxopt
-
-
-@timeout.timeout(10)
-def l1obj_l2constraint(M_val, w_val):
- """
- Solves:
- min - sum_j theta_j
- s.t theta_j <= 0
- |e^{M*theta} - (1 - w)|_2 <= 1
- """
- assert len(M_val) == len(w_val)
-
- if M_val.dtype == bool:
- M_val = M_val.astype('float32')
-
- m, n = M_val.shape
- c = cvxopt.matrix(-1.0, (n,1))
-
- theta = tensor.row().T
- z = tensor.row().T
- theta_ = theta.flatten()
- z_ = z.flatten()
- M = theano.shared(M_val.astype(theano.config.floatX))
- w = theano.shared(w_val.astype(theano.config.floatX))
- y = (tensor.exp(M.dot(theta_)) - (1 - w)).norm(2) - 1
- y_diff = tensor.grad(y, theta_)
- y_hess = z[0] * theano.gradient.hessian(y, theta_)
- f_x = theano.function([theta], [y, y_diff], allow_input_downcast=True)
- f_xz = theano.function([theta, z], [y, y_diff, y_hess],
- allow_input_downcast=True)
-
- def F(x=None, z=None):
- if x is None:
- return 1, cvxopt.matrix(.0001, (n,1))
- elif z is None:
- y, y_diff = f_x(x)
- return cvxopt.matrix(float(y), (1, 1)),\
- cvxopt.matrix(y_diff.astype("float64")).T
- else:
- y, y_diff, y_hess = f_xz(x, z)
- return cvxopt.matrix(float(y), (1, 1)), \
- cvxopt.matrix(y_diff.astype("float64")).T, \
- cvxopt.matrix(y_hess.astype("float64"))
-
- G = cvxopt.spdiag([1 for i in xrange(n)])
- h = cvxopt.matrix(0.0, (n,1))
-
- cvxopt.solvers.options['show_progress'] = False
- try:
- theta = cvxopt.solvers.cpl(c,F, G, h)['x']
- except ArithmeticError:
- print "ArithmeticError thrown, change initial point"+\
- " given to the solver"
-
- return 1 - np.exp(theta), theta
-
-
-@timeout.timeout(10)
-def l1obj_l2penalization(M_val, w_val, lbda):
- """
- Solves:
- min - sum_j theta_j + lbda*|e^{M*theta} - (1 - w)|_2
- s.t theta_j <= 0
- """
- assert len(M_val) == len(w_val)
-
- if M_val.dtype == bool:
- M_val = M_val.astype('float32')
-
- if type(lbda) == int:
- lbda = np.array(lbda)
-
- m, n = M_val.shape
-
- theta = tensor.row().T
- z = tensor.row().T
- theta_ = theta.flatten()
- z_ = z.flatten()
- M = theano.shared(M_val.astype(theano.config.floatX))
- w = theano.shared(w_val.astype(theano.config.floatX))
- lbda = theano.shared(lbda.astype(theano.config.floatX))
- y = (theta_).norm(1) + lbda * (
- tensor.exp(M.dot(theta_)) - (1 - w)).norm(2)
- y_diff = tensor.grad(y, theta_)
- y_hess = z[0] * theano.gradient.hessian(y, theta_)
- f_x = theano.function([theta], [y, y_diff], allow_input_downcast=True)
- f_xz = theano.function([theta, z], [y, y_diff, y_hess],
- allow_input_downcast=True)
-
- def F(x=None, z=None):
- if x is None:
- return 0, cvxopt.matrix(.0001, (n,1))
- elif z is None:
- y, y_diff = f_x(x)
- return cvxopt.matrix(float(y), (1, 1)),\
- cvxopt.matrix(y_diff.astype("float64")).T
- else:
- y, y_diff, y_hess = f_xz(x, z)
- return cvxopt.matrix(float(y), (1, 1)), \
- cvxopt.matrix(y_diff.astype("float64")).T, \
- cvxopt.matrix(y_hess.astype("float64"))
-
- G = cvxopt.spdiag([1 for i in xrange(n)])
- h = cvxopt.matrix(0.0, (n,1))
-
- cvxopt.solvers.options['show_progress'] = False
- try:
- theta = cvxopt.solvers.cp(F, G, h)['x']
- except ArithmeticError:
- print "ArithmeticError thrown, change initial point"+\
- " given to the solver"
-
- return 1 - np.exp(theta), theta
-
-
-def test():
- """
- unit test
- """
- lbda = 10
- G = cascade_creation.InfluenceGraph(max_proba=.8)
- G.erdos_init(n=100, p = .1)
- A = cascade_creation.generate_cascades(G, .1, 2000)
- M_val, w_val = cascade_creation.icc_matrixvector_for_node(A, 0)
- p_vec, theta = l1obj_l2penalization(M_val, w_val, lbda)
- print p_vec
-
-if __name__=="__main__":
- test()