From 9c2d0453e83d7a2472a02bcc09fc7b2a5c79fc6a Mon Sep 17 00:00:00 2001
From: ericbalkanski
Date: Sun, 7 Dec 2014 17:16:57 -0500
Subject: Revert "Revert 566f924..4ba0141"

This reverts commit 6e15f30cda55b7bff805e2475f2300e63e59318e.
---
 datasets/subset_facebook_SNAPnormalize.txt | 2 +-
 notes/reportYaron.tex                      | 5 +++--
 src/algorithms.py                          | 2 +-
 src/cascade_creation.py                    | 2 +-
 src/make_plots.py                          | 2 +-
 5 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/datasets/subset_facebook_SNAPnormalize.txt b/datasets/subset_facebook_SNAPnormalize.txt
index 30851e3..049260c 100644
--- a/datasets/subset_facebook_SNAPnormalize.txt
+++ b/datasets/subset_facebook_SNAPnormalize.txt
@@ -5035,4 +5035,4 @@
 242 107
 317 306
 331 249
-204 255
+204 255
\ No newline at end of file
diff --git a/notes/reportYaron.tex b/notes/reportYaron.tex
index acdfaea..d5822ed 100644
--- a/notes/reportYaron.tex
+++ b/notes/reportYaron.tex
@@ -19,7 +19,9 @@ Given a set of observed cascades, the \textbf{graph reconstruction problem} cons
 
 \section{Related Work}
 
-In previous work, this problem has been formulated in different ways, including a convex optimization and a maximum likelihood problem. However, there is no known algorithm for graph reconstruction with theoretical guarantees and with a reasonable required sample size.
+There have been several works tackling the graph reconstruction problem in variants of the independent cascade. We briefly summarize their results and approaches below.
+
+
 
 \section{The Voter Model}
 
@@ -278,7 +280,6 @@ $\delta_4 = .54$ & $\delta_4 = .37$ & $\delta_4 = .43$ & $\delta_4 = .23$ \\
 
 The results of our findings on a very small social network (a subset of the famous Karate club), show that as the number of cascades increase the RIP constants decrease and that if $p_\text{init}$ is small then the RIP constant decrease as well. Finally the constants we obtain are either under or close to the $.25$ mark set by the authors of \cite{candes}.
 
-
 \subsection{Testing our algorithm}
 
diff --git a/src/algorithms.py b/src/algorithms.py
index 0e240c9..39bcbb2 100644
--- a/src/algorithms.py
+++ b/src/algorithms.py
@@ -60,7 +60,7 @@ def correctness_measure(G, G_hat, print_values=False):
     edges_hat = set(G_hat.edges())
     fp = len(edges_hat - edges)
     fn = len(edges - edges_hat)
-    tp = len(edges | edges_hat)
+    tp = len(edges & edges_hat)
     tn = G.number_of_nodes() ** 2 - fp - fn - tp
 
     #Other metrics
diff --git a/src/cascade_creation.py b/src/cascade_creation.py
index 9a26c03..1a71285 100644
--- a/src/cascade_creation.py
+++ b/src/cascade_creation.py
@@ -4,7 +4,7 @@ import collections
 from itertools import izip
 from sklearn.preprocessing import normalize
 
-class InfluenceGraph(nx.Graph):
+class InfluenceGraph(nx.DiGraph):
     """
     networkX graph with mat and logmat attributes
     """
diff --git a/src/make_plots.py b/src/make_plots.py
index 7c8bebb..905c731 100644
--- a/src/make_plots.py
+++ b/src/make_plots.py
@@ -40,7 +40,7 @@ def compare_greedy_and_lagrange_cs284r():
     """
     G = cascade_creation.InfluenceGraph(max_proba = .8)
     G.import_from_file("../datasets/subset_facebook_SNAPnormalize.txt")
-    A = cascade_creation.generate_cascades(G, p_init=.05, n_cascades=50)
+    A = cascade_creation.generate_cascades(G, p_init=.05, n_cascades=100)
 
     #Greedy
     G_hat = algorithms.greedy_prediction(G, A)
-- 
cgit v1.2.3-70-g09d2
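
The substantive code change restored by this revert is the true-positive count in correctness_measure: with Python set semantics, edges & edges_hat contains only the edges present in both the true and reconstructed graphs, whereas edges | edges_hat would also count every false positive and false negative as a hit. The sketch below is illustrative only, not code from this repository; the helper name confusion_counts and the toy graphs are assumptions made for the example. It mirrors the corrected counting on an nx.DiGraph, consistent with the switch from nx.Graph to nx.DiGraph in cascade_creation.py.

# Minimal sketch (not the repository's code) of the corrected confusion-matrix
# counting used to score a reconstructed graph G_hat against the true graph G.
import networkx as nx

def confusion_counts(G, G_hat):
    """Return (tp, fp, fn, tn) for predicted edge set G_hat against truth G."""
    edges = set(G.edges())
    edges_hat = set(G_hat.edges())
    tp = len(edges & edges_hat)   # correctly recovered edges (intersection)
    fp = len(edges_hat - edges)   # predicted edges that do not exist
    fn = len(edges - edges_hat)   # true edges that were missed
    # Remaining ordered node pairs, following the patch's convention of
    # counting all n**2 pairs (self-loops included) as candidates.
    tn = G.number_of_nodes() ** 2 - tp - fp - fn
    return tp, fp, fn, tn

if __name__ == "__main__":
    # Toy directed graphs, matching the nx.DiGraph switch above.
    G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    G_hat = nx.DiGraph([(1, 2), (3, 2)])
    print(confusion_counts(G, G_hat))  # (1, 1, 2, 5)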