import matplotlib.pyplot as plt
import numpy as np

import cascade_creation
import convex_optimization
import algorithms
import rip_condition


def compare_greedy_and_lagrange_cs284r():
    """Compare the greedy algorithm against the Lagrangian sparse-recovery
    objective on the Facebook dataset (CS284r project).

    Prints correctness measures for both methods via
    ``algorithms.correctness_measure``; returns nothing.
    """
    G = cascade_creation.InfluenceGraph(max_proba=.8)
    G.import_from_file("../datasets/subset_facebook_SNAPnormalize.txt")
    A = cascade_creation.generate_cascades(G, p_init=.05, n_cascades=100)

    # Greedy baseline
    G_hat = algorithms.greedy_prediction(G, A)
    algorithms.correctness_measure(G, G_hat, print_values=True)

    # Lagrange objective (l1 objective, l2 constraint)
    G_hat = algorithms.recovery_l1obj_l2constraint(
        G, A,
        passed_function=convex_optimization.type_lasso,
        floor_cstt=.05,
        lbda=10)
    algorithms.correctness_measure(G, G_hat, print_values=True)


def compute_graph(graph_name, n_cascades, lbda, passed_function):
    """Run one recovery experiment on the graph stored in *graph_name*.

    Parameters
    ----------
    graph_name : str
        Path to the edge-list file loaded by ``InfluenceGraph.import_from_file``.
    n_cascades : int
        Number of cascades to simulate.
    lbda : float
        Regularization weight forwarded to the convex solver (ignored for greedy).
    passed_function : callable
        Either ``algorithms.greedy_prediction`` or one of the
        ``convex_optimization`` objective functions.

    Prints correctness measures; returns nothing.
    """
    G = cascade_creation.InfluenceGraph(max_proba=.7, min_proba=.2)
    G.import_from_file(graph_name)
    A = cascade_creation.generate_cascades(G, p_init=.05, n_cascades=n_cascades)

    if passed_function == algorithms.greedy_prediction:
        G_hat = algorithms.greedy_prediction(G, A)
    else:
        G_hat = algorithms.recovery_passed_function(
            G, A,
            passed_function=passed_function,
            floor_cstt=.1,
            lbda=lbda,
            n_cascades=n_cascades)
    algorithms.correctness_measure(G, G_hat, print_values=True)


def plot_watts_strogatz_graph():
    """Plot F1 score vs. log(number of cascades) for the four recovery
    methods on the Watts-Strogatz graph, and save the figure as a PDF.

    Hard-coded values were measured offline; the x axis is log-scaled by
    hand (log of the cascade counts) with tick labels showing the raw counts.
    """
    plt.clf()
    labels = [50, 100, 500, 1000, 2000, 5000]
    x = [np.log(n) for n in labels]
    sparse_recov = [.25, .32, .7, .82, .89, .92]
    max_likel = [.21, .29, .67, .8, .87, .9]
    lasso = [.07, .30, .46, .65, .86, .89]
    greedy = [.09, .15, .4, .63, .82, .92]

    # BUG FIX: the original created a throwaway figure with plt.figure(1)
    # and immediately shadowed it with plt.subplots(), leaking a figure.
    fig, ax = plt.subplots()
    plt.axis((np.log(45), np.log(5500), 0, 1))
    plt.xlabel("Number of Cascades")
    plt.ylabel("F1 score")
    plt.grid(color="lightgrey")
    ax.plot(x, greedy, 'ko-', color="lightseagreen", label='Greedy')
    ax.plot(x, lasso, 'ko-', color="orange", label="Lasso")
    ax.plot(x, max_likel, 'ko-', color="cornflowerblue", label="MLE")
    ax.plot(x, sparse_recov, 'ko-', color="k", label="Our Method")
    plt.legend(loc="lower right")
    ax.set_xticks(x)
    ax.set_xticklabels(tuple(labels))
    plt.savefig("../paper/figures/" + "watts_strogatz.pdf")


def plot_ROC_curve(figure_name):
    """Plot the ROC-style operating points of Lasso vs. our method and save
    the figure to ``../paper/figures/<figure_name>``.

    BUG FIXES vs. the original:
    - ``ax.set_xticks(x)`` / ``ax.set_xticklabels(tuple(labels))`` referenced
      names ``x`` and ``labels`` that were never defined in this function
      (``labels`` was commented out), raising NameError on every call.
      Removed; default ticks are used.
    - The axis window ``(np.log(45), np.log(5500), 0, 1)`` was copied from
      the cascade plot and pushed all data (x in [.55, .9]) off-screen;
      replaced with the unit square that contains the points.

    NOTE(review): the axis labels "Number of Cascades"/"F1 score" also look
    copied from the cascade plot and are probably wrong for an ROC curve,
    but they are kept verbatim — confirm intended labels before changing.
    """
    plt.clf()
    x_sparse = [.57, .6, .61, .76, .9]
    y_sparse = [.41, .4, .37, .16, .03]
    x_lasso = [.55, .56, .66]
    y_lasso = [.5, .43, .25]

    fig, ax = plt.subplots()
    plt.axis((0, 1, 0, 1))  # unit square: all operating points are in [0, 1]
    plt.xlabel("Number of Cascades")
    plt.ylabel("F1 score")
    plt.grid(color="lightgrey")
    ax.plot(x_lasso, y_lasso, 'ko-', color="orange", label="Lasso")
    ax.plot(x_sparse, y_sparse, 'ko-', color="k", label="Our Method")
    plt.legend(loc="lower right")
    plt.savefig("../paper/figures/" + figure_name)


if __name__ == "__main__":
    # Experiment toggles: flip the 0/1 flags to select which graph to run.
    # Alternative passed_function values: algorithms.greedy_prediction,
    # convex_optimization.type_lasso.
    if 0:
        compute_graph("../datasets/watts_strogatz_300_30_point3.txt",
                      n_cascades=100,
                      lbda=.01,
                      passed_function=convex_optimization.sparse_recovery)
    if 0:
        compute_graph("../datasets/powerlaw_200_30_point3.txt",
                      n_cascades=300,
                      lbda=.002,
                      passed_function=convex_optimization.sparse_recovery)
    if 1:
        compute_graph("../datasets/barabasi_albert_300_30.txt",
                      n_cascades=100,
                      lbda=.002,
                      passed_function=convex_optimization.sparse_recovery)