-rw-r--r--  src/cascade_creation.py      3
-rw-r--r--  src/convex_optimization.py  18
-rw-r--r--  src/make_plots.py            7
3 files changed, 16 insertions, 12 deletions
diff --git a/src/cascade_creation.py b/src/cascade_creation.py
index 93ce662..39fc531 100644
--- a/src/cascade_creation.py
+++ b/src/cascade_creation.py
@@ -1,6 +1,7 @@
 import networkx as nx
 import numpy as np
 import collections
+import timeout
 #from itertools import izip
 from sklearn.preprocessing import normalize
 
@@ -70,6 +71,7 @@ class Cascade(list):
         return candidate_infectors
 
 
+@timeout.timeout(5)
 def icc_cascade(G, p_init):
     """
     Returns boolean vectors for one cascade
@@ -87,7 +89,6 @@ def icc_cascade(G, p_init):
         active = active & susceptible
         susceptible = susceptible & np.logical_not(active)
     if not cascade:
-        print("Empty cascade, consider changing p_init or n_nodes. Retrying.")
         return icc_cascade(G, p_init)
     return cascade
 
diff --git a/src/convex_optimization.py b/src/convex_optimization.py
index f9be47d..3241bdb 100644
--- a/src/convex_optimization.py
+++ b/src/convex_optimization.py
@@ -36,8 +36,7 @@ def sparse_recovery(M_val, w_val, lbda):
     w = theano.shared(w_val.astype(theano.config.floatX))
     lbda = theano.shared(lbda.astype(theano.config.floatX))
 
-    #Objective
-    y = lbda * (theta_).norm(1) - 1./m*(
+    y = lbda*1./np.sqrt(m) * (theta_).norm(1) - 1./m*(
         tensor.dot(1-w, tensor.log(1-tensor.exp(M.dot(theta_ *1./(n*m)))))\
         + (1 - 1./(n*m)) * tensor.dot(1 - w, tensor.dot(M, theta_)) \
         + tensor.dot(w, tensor.dot(M, theta_)))
@@ -102,8 +101,8 @@ def diff_and_opt(theta, theta_, M, M_val, w, lbda, y):
     h = cvxopt.matrix(0.0, (n,1))
 
     #Relaxing precision constraints
-    cvxopt.solvers.options['feastol'] = 2e-5
-    cvxopt.solvers.options['abstol'] = 2e-5
+    #cvxopt.solvers.options['feastol'] = 2e-5
+    #cvxopt.solvers.options['abstol'] = 2e-5
     #cvxopt.solvers.options['maxiters'] = 100
     cvxopt.solvers.options['show_progress'] = True
     try:
@@ -111,7 +110,9 @@
     except ArithmeticError:
         print("ArithmeticError thrown, change initial point"+\
             " given to the solver")
-    except
+
+    if cvxopt.solvers.options['show_progress']:
+        print(1 - np.exp(theta))
 
     return 1 - np.exp(theta), theta
 
@@ -121,9 +122,9 @@ def test():
     """
     unit test
     """
     lbda = 1
-    G = cascade_creation.InfluenceGraph(max_proba=.8)
-    G.erdos_init(n=100, p = .1)
-    A = cascade_creation.generate_cascades(G, .1, 100)
+    G = cascade_creation.InfluenceGraph(max_proba=.9)
+    G.erdos_init(n=10, p = .3)
+    A = cascade_creation.generate_cascades(G, .1, 1000)
     M_val, w_val = cascade_creation.icc_matrixvector_for_node(A, 0)
     #Type lasso
@@ -134,6 +135,7 @@
     #Sparse recovery
     if 1:
         p_vec, theta = sparse_recovery(M_val, w_val, lbda)
+        print(G.mat[0])
         print(p_vec)
 
 if __name__=="__main__":
diff --git a/src/make_plots.py b/src/make_plots.py
index 57c5caa..60c2292 100644
--- a/src/make_plots.py
+++ b/src/make_plots.py
@@ -32,11 +32,12 @@ def test():
     unit test
     """
     G = cascade_creation.InfluenceGraph(max_proba=.8)
-    G.erdos_init(n=100, p=.1)
-    A = cascade_creation.generate_cascades(G, p_init=.02, n_cascades=1000)
+    G.erdos_init(n=20, p=.2)
+    print(G.mat)
+    A = cascade_creation.generate_cascades(G, p_init=.1, n_cascades=1000)
     G_hat = algorithms.recovery_l1obj_l2constraint(G, A,
             passed_function=convex_optimization.sparse_recovery,
-            floor_cstt=.1, lbda=20)
+            floor_cstt=.1, lbda=20)
     algorithms.correctness_measure(G, G_hat, print_values=True)
 
 if __name__=="__main__":
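Note: the new @timeout.timeout(5) decorator relies on a local timeout module that is not included in this diff. A minimal sketch of what such a module might contain, assuming a Unix signal-based implementation exposing a timeout(seconds) decorator factory (both are assumptions, not the repository's actual code), could look like:

import functools
import signal

def timeout(seconds):
    """Hypothetical sketch: abort the decorated call after `seconds`
    seconds using SIGALRM (Unix only, main thread only)."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            def handler(signum, frame):
                raise TimeoutError("%s timed out after %d s"
                                   % (func.__name__, seconds))
            old_handler = signal.signal(signal.SIGALRM, handler)
            signal.alarm(seconds)            # arm the alarm
            try:
                return func(*args, **kwargs)
            finally:
                signal.alarm(0)              # disarm the alarm
                signal.signal(signal.SIGALRM, old_handler)
        return wrapper
    return decorator

Under these assumptions, an icc_cascade call that recurses for more than five seconds raises TimeoutError instead of hanging, which matches the removal of the "Empty cascade ... Retrying." print-and-retry path above.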
