aboutsummaryrefslogtreecommitdiffstats
path: root/jpa_test
diff options
context:
space:
mode:
authorjeanpouget-abadie <jean.pougetabadie@gmail.com>2014-12-05 16:04:31 -0500
committerjeanpouget-abadie <jean.pougetabadie@gmail.com>2014-12-05 16:04:31 -0500
commite83914a338aa5fb8fb90cddbf306eca8f90365b5 (patch)
tree4dc8eb8e491fb12c0a57c16202ef037236d717c6 /jpa_test
parentee58bbeb5e8d870835c03d7f36b9976a0e8b857f (diff)
downloadcascades-e83914a338aa5fb8fb90cddbf306eca8f90365b5.tar.gz
lagrangian function convex_optimization
Diffstat (limited to 'jpa_test')
-rw-r--r--jpa_test/algorithms.py22
-rw-r--r--jpa_test/convex_optimization.py74
2 files changed, 77 insertions, 19 deletions
diff --git a/jpa_test/algorithms.py b/jpa_test/algorithms.py
index 0d5f154..5d28300 100644
--- a/jpa_test/algorithms.py
+++ b/jpa_test/algorithms.py
@@ -35,11 +35,11 @@ def greedy_prediction(G, cascades):
return G_hat
-def recovery_l1obj_l2constraint(G, cascades):
+def recovery_l1obj_l2constraint(G, cascades, floor_cstt, passed_function,
+ *args, **kwargs):
"""
- Returns estimated graph from following convex program:
- min |theta_1| + lbda | exp(M theta) -(1- w)|
- where theta = log (1 - p); w = 1_{infected}; lbda = lagrange cstt
+ Returns estimated graph from convex program specified by passed_function
+    passed_function should have similar structure to ones in convex_optimization
"""
G_hat = cascade_creation.InfluenceGraph(max_proba=None)
G_hat.add_nodes_from(G.nodes())
@@ -47,9 +47,9 @@ def recovery_l1obj_l2constraint(G, cascades):
print node
try:
M, w = cascade_creation.icc_matrixvector_for_node(cascades, node)
- p_node, __ = convex_optimization.l1obj_l2constraint(M,w)
+ p_node, __ = passed_function(M,w, *args, **kwargs)
G_hat = cascade_creation.add_edges_from_proba_vector(G=G_hat,
- p_node=p_node, node=node, floor_cstt=.01)
+ p_node=p_node, node=node, floor_cstt=floor_cstt)
except timeout.TimeoutError:
print "TimeoutError, skipping to next node"
return G_hat
@@ -75,14 +75,14 @@ def test():
"""
unit test
"""
- G = cascade_creation.InfluenceGraph(max_proba = .5)
+ G = cascade_creation.InfluenceGraph(max_proba = .8)
G.erdos_init(n = 100, p = .3)
import time
t0 = time.time()
- A = cascade_creation.generate_cascades(G, .2, 100)
-
- G_hat = recovery_l1obj_l2constraint(G, A)
-
+ A = cascade_creation.generate_cascades(G, .2, 10000)
+ G_hat = recovery_l1obj_l2constraint(G, A,
+ passed_function=convex_optimization.l1obj_l2penalization,
+ floor_cstt=.1, lbda=100)
correctness_measure(G, G_hat, print_values=True)
diff --git a/jpa_test/convex_optimization.py b/jpa_test/convex_optimization.py
index a9556ae..530e3f5 100644
--- a/jpa_test/convex_optimization.py
+++ b/jpa_test/convex_optimization.py
@@ -32,7 +32,8 @@ def l1obj_l2constraint(M_val, w_val):
y_diff = tensor.grad(y, theta_)
y_hess = z[0] * theano.gradient.hessian(y, theta_)
f_x = theano.function([theta], [y, y_diff], allow_input_downcast=True)
- f_xz = theano.function([theta, z], [y, y_diff, y_hess], allow_input_downcast=True)
+ f_xz = theano.function([theta, z], [y, y_diff, y_hess],
+ allow_input_downcast=True)
def F(x=None, z=None):
if x is None:
@@ -60,18 +61,75 @@ def l1obj_l2constraint(M_val, w_val):
return 1 - np.exp(theta), theta
+@timeout.timeout(10)
+def l1obj_l2penalization(M_val, w_val, lbda):
+ """
+ Solves:
+ min - sum_j theta_j + lbda*|e^{M*theta} - (1 - w)|_2
+ s.t theta_j <= 0
+ """
+ assert len(M_val) == len(w_val)
+
+ if M_val.dtype == bool:
+ M_val = M_val.astype('float32')
+
+ if type(lbda) == int:
+ lbda = np.array(lbda)
+
+ m, n = M_val.shape
+
+ theta = tensor.row().T
+ z = tensor.row().T
+ theta_ = theta.flatten()
+ z_ = z.flatten()
+ M = theano.shared(M_val.astype(theano.config.floatX))
+ w = theano.shared(w_val.astype(theano.config.floatX))
+ lbda = theano.shared(lbda.astype(theano.config.floatX))
+ y = (theta_).norm(1) + lbda * (
+ tensor.exp(M.dot(theta_)) - (1 - w)).norm(2)
+ y_diff = tensor.grad(y, theta_)
+ y_hess = z[0] * theano.gradient.hessian(y, theta_)
+ f_x = theano.function([theta], [y, y_diff], allow_input_downcast=True)
+ f_xz = theano.function([theta, z], [y, y_diff, y_hess],
+ allow_input_downcast=True)
+
+ def F(x=None, z=None):
+ if x is None:
+ return 0, cvxopt.matrix(.0001, (n,1))
+ elif z is None:
+ y, y_diff = f_x(x)
+ return cvxopt.matrix(float(y), (1, 1)),\
+ cvxopt.matrix(y_diff.astype("float64")).T
+ else:
+ y, y_diff, y_hess = f_xz(x, z)
+ return cvxopt.matrix(float(y), (1, 1)), \
+ cvxopt.matrix(y_diff.astype("float64")).T, \
+ cvxopt.matrix(y_hess.astype("float64"))
+
+ G = cvxopt.spdiag([1 for i in xrange(n)])
+ h = cvxopt.matrix(0.0, (n,1))
+
+ cvxopt.solvers.options['show_progress'] = False
+ try:
+ theta = cvxopt.solvers.cp(F, G, h)['x']
+ except ArithmeticError:
+ print "ArithmeticError thrown, change initial point"+\
+ " given to the solver"
+
+ return 1 - np.exp(theta), theta
+
+
def test():
"""
unit test
"""
- G = cascade_creation.InfluenceGraph(max_proba=.5)
- G.erdos_init(n=100, p = .8)
- A = cascade_creation.generate_cascades(G, .1, 20)
+ lbda = 10
+ G = cascade_creation.InfluenceGraph(max_proba=.8)
+ G.erdos_init(n=100, p = .1)
+ A = cascade_creation.generate_cascades(G, .1, 2000)
M_val, w_val = cascade_creation.icc_matrixvector_for_node(A, 0)
- assert len(M_val) == len(w_val)
- print np.linalg.matrix_rank(M_val)
- p_vec, theta = l1obj_l2constraint(M_val, w_val)
- print len(p_vec)
+ p_vec, theta = l1obj_l2penalization(M_val, w_val, lbda)
+ print p_vec
if __name__=="__main__":
test()