about summary refs log tree commit diff stats
path: root/simulation
diff options
context:
space:
mode:
author jeanpouget-abadie <jean.pougetabadie@gmail.com> 2015-12-02 15:43:34 -0500
committer jeanpouget-abadie <jean.pougetabadie@gmail.com> 2015-12-02 15:43:34 -0500
commit de815c196ad03e5d76cc675696b9cd7c1b3b3fbb (patch)
tree 3fb1cc6932e02a80c8955e31d900a0c7de268d70 /simulation
parent ccb192c4190701531094b46df85725158d4e9ffc (diff)
download cascades-de815c196ad03e5d76cc675696b9cd7c1b3b3fbb.tar.gz
changing active learning definition plus main loops of mle/vi_blocks
Diffstat (limited to 'simulation')
-rw-r--r--simulation/mle_blocks.py13
-rw-r--r--simulation/utils_blocks.py10
-rw-r--r--simulation/vi_blocks.py17
3 files changed, 19 insertions, 21 deletions
diff --git a/simulation/mle_blocks.py b/simulation/mle_blocks.py
index ab8816f..0d27869 100644
--- a/simulation/mle_blocks.py
+++ b/simulation/mle_blocks.py
@@ -30,8 +30,9 @@ def create_mle_model(graph):
if __name__ == "__main__":
batch_size = 100
- n_obs = 100000
- graph = utils.create_wheel(100)
+ #n_obs = 100000
+ freq = 10
+ graph = utils.create_wheel(1000)
print('GRAPH:\n', graph, '\n-------------\n')
@@ -51,10 +52,10 @@ if __name__ == "__main__":
extensions=[
be.FinishAfter(after_n_batches=10**3),
bm.TrainingDataMonitoring([cost, params,
- rmse, error], every_n_batches=10),
- be.Printing(every_n_batches=10),
- ub.JSONDump("log.json", every_n_batches=10),
- ub.ActiveLearning(data_stream.dataset),
+ rmse, error], every_n_batches=freq),
+ be.Printing(every_n_batches=freq),
+ ub.JSONDump("logs/active_outdegree_mle.json", every_n_batches=freq),
+ ub.ActiveLearning(data_stream.dataset, graph, every_n_batches=freq),
],
)
loop.run()
diff --git a/simulation/utils_blocks.py b/simulation/utils_blocks.py
index 3b29972..72a6881 100644
--- a/simulation/utils_blocks.py
+++ b/simulation/utils_blocks.py
@@ -31,16 +31,14 @@ class ActiveLearning(be.SimpleExtension):
Extension which updates the node_p array passed to the get_data method of
LearnedDataset
"""
- def __init__(self, dataset, **kwargs):
+ def __init__(self, dataset, params, **kwargs):
super(ActiveLearning, self).__init__(**kwargs)
self.dataset = dataset
+ self.params = params
def do(self, which_callback, *args):
- out_degree = np.sum(self.dataset.graph, axis=1)
- self.dataset.node_p = out_degree / np.sum(out_degree)
-
-# def do(self, which_callback, *args):
-
+ exp_out_par = np.exp(np.sum(self.params, axis=1))
+ self.dataset.node_p = exp_out_par / np.sum(exp_out_par)
class JSONDump(be.SimpleExtension):
diff --git a/simulation/vi_blocks.py b/simulation/vi_blocks.py
index b78375b..5deb6f6 100644
--- a/simulation/vi_blocks.py
+++ b/simulation/vi_blocks.py
@@ -51,10 +51,10 @@ def create_vi_model(n_nodes, n_samp=100):
if __name__ == "__main__":
- n_cascades = 10000
- batch_size = 10
+ batch_size = 100
+ frequency = 10
n_samples = 50
- graph = utils.create_random_graph(n_nodes=4)
+ graph = utils.create_random_graph(n_nodes=10)
print('GRAPH:\n', graph, '\n-------------\n')
x, s, mu, sig, cost = create_vi_model(len(graph), n_samples)
@@ -65,17 +65,16 @@ if __name__ == "__main__":
alg = algorithms.GradientDescent(cost=cost, parameters=[mu, sig],
step_rule=step_rules)
- data_stream = ub.fixed_data_stream(n_cascades, graph, batch_size,
- shuffle=False)
- # data_stream = ub.dynamic_data_stream(graph, batch_size)
+ data_stream = ub.dynamic_data_stream(graph, batch_size)
loop = main_loop.MainLoop(
alg, data_stream,
log_backend="sqlite",
extensions=[
be.FinishAfter(after_n_batches=10**4),
- bm.TrainingDataMonitoring([cost, mu, sig, rmse],
- every_n_batches=10),
- be.Printing(every_n_batches=100, after_epoch=False),
+ bm.TrainingDataMonitoring([cost, rmse, mu], every_n_batches=frequency),
+ be.Printing(every_n_batches=frequency, after_epoch=False),
+ ub.JSONDump("logs/tmp.json", every_n_batches=10),
+ #ub.ActiveLearning(dataset=data_stream.dataset, params=graph)
]
)
loop.run()