-rw-r--r--  src/convex_optimization.py    2
-rw-r--r--  src/make_plots.py            26
2 files changed, 14 insertions, 14 deletions
diff --git a/src/convex_optimization.py b/src/convex_optimization.py
index d667c6e..39f7ee7 100644
--- a/src/convex_optimization.py
+++ b/src/convex_optimization.py
@@ -107,7 +107,7 @@ def diff_and_opt(M_val, w_val, f_x, f_xz):
     #Relaxing precision constraints
     cvxopt.solvers.options['feastol'] = 2e-5
     cvxopt.solvers.options['abstol'] = 2e-5
-    #cvxopt.solvers.options['maxiters'] = 100
+    cvxopt.solvers.options['maxiters'] = 50
     cvxopt.solvers.options['show_progress'] = False
     try:
         theta = cvxopt.solvers.cp(F, G, h)['x']
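For context on the hunk above: cvxopt.solvers.options is a module-level dict read by every subsequent solver call, so the new maxiters cap applies to the cp() call in the try block. The snippet below is a self-contained sketch, not code from this repository: the toy objective, G, and h are illustrative and are patterned on the analytic-centering example in the cvxopt documentation; only the option settings mirror this diff.

import cvxopt.solvers
from cvxopt import matrix, log, spdiag

def F(x=None, z=None):
    # Convex objective f0(x) = -sum(log(x_i)), no nonlinear constraints (m = 0).
    if x is None:
        return 0, matrix(1.0, (3, 1))   # (number of nonlinear constraints, starting point)
    if min(x) <= 0.0:
        return None                     # x outside the domain of f0
    f = -sum(log(x))
    Df = -(x**-1).T                     # gradient, 1 x n
    if z is None:
        return f, Df
    H = spdiag(z[0] * x**-2)            # z[0] * Hessian of f0
    return f, Df, H

# Linear constraints G x <= h: x >= 0 and x1 + x2 + x3 <= 1 (illustrative only).
G = matrix([[-1.0, 0.0, 0.0, 1.0],
            [0.0, -1.0, 0.0, 1.0],
            [0.0, 0.0, -1.0, 1.0]])
h = matrix([0.0, 0.0, 0.0, 1.0])

# Same relaxed tolerances and iteration cap as in the diff above.
cvxopt.solvers.options['feastol'] = 2e-5
cvxopt.solvers.options['abstol'] = 2e-5
cvxopt.solvers.options['maxiters'] = 50
cvxopt.solvers.options['show_progress'] = False

theta = cvxopt.solvers.cp(F, G, h)['x']
print(theta)                            # approximately [1/3, 1/3, 1/3]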
diff --git a/src/make_plots.py b/src/make_plots.py
index 7da3e1e..d92b008 100644
--- a/src/make_plots.py
+++ b/src/make_plots.py
@@ -32,7 +32,7 @@ def watts_strogatz(n_cascades, lbda, passed_function):
     Test running time on different algorithms
     """
     G = cascade_creation.InfluenceGraph(max_proba=.7, min_proba=.2)
-    G.import_from_file("../datasets/watts_strogatz_500_80_point3.txt")
+    G.import_from_file("../datasets/watts_strogatz_300_30_point3.txt")
     A = cascade_creation.generate_cascades(G, p_init=.05, n_cascades=n_cascades)
     if passed_function==algorithms.greedy_prediction:
@@ -44,19 +44,19 @@ def watts_strogatz(n_cascades, lbda, passed_function):
     algorithms.correctness_measure(G, G_hat, print_values=True)
-def test():
-    """
-    unit test
-    """
-    G = cascade_creation.InfluenceGraph(max_proba=1, min_proba=.2)
-    G.erdos_init(n=50, p=.2)
-    A = cascade_creation.generate_cascades(G, p_init=.1, n_cascades=1000)
-    G_hat = algorithms.recovery_passed_function(G, A,
-            passed_function=convex_optimization.sparse_recovery,
-            floor_cstt=.1, lbda=.001, n_cascades=1000)
-    algorithms.correctness_measure(G, G_hat, print_values=True)
+# def test():
+#     """
+#     unit test
+#     """
+#     G = cascade_creation.InfluenceGraph(max_proba=1, min_proba=.2)
+#     G.erdos_init(n=50, p=.2)
+#     A = cascade_creation.generate_cascades(G, p_init=.1, n_cascades=1000)
+#     G_hat = algorithms.recovery_passed_function(G, A,
+#             passed_function=convex_optimization.sparse_recovery,
+#             floor_cstt=.1, lbda=.001, n_cascades=1000)
+#     algorithms.correctness_measure(G, G_hat, print_values=True)
 if __name__=="__main__":
-    watts_strogatz(n_cascades=1000, lbda=.001, passed_function=
+    watts_strogatz(n_cascades=500, lbda=.001, passed_function=
             convex_optimization.sparse_recovery)
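Review note on the disabled test: instead of leaving test() as a commented-out block, it could be kept runnable behind its own entry point so it does not interfere with the timing run. The sketch below just restates the deleted code; it assumes cascade_creation, algorithms, and convex_optimization are importable from src/ with the signatures used above, and the name test_erdos_recovery is hypothetical.

import algorithms
import cascade_creation
import convex_optimization

def test_erdos_recovery():
    # Same scenario as the removed test(): recover an Erdos-Renyi influence
    # graph from 1000 simulated cascades via sparse recovery.
    G = cascade_creation.InfluenceGraph(max_proba=1, min_proba=.2)
    G.erdos_init(n=50, p=.2)
    A = cascade_creation.generate_cascades(G, p_init=.1, n_cascades=1000)
    G_hat = algorithms.recovery_passed_function(
        G, A,
        passed_function=convex_optimization.sparse_recovery,
        floor_cstt=.1, lbda=.001, n_cascades=1000)
    algorithms.correctness_measure(G, G_hat, print_values=True)

if __name__ == "__main__":
    test_erdos_recovery()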