Diffstat (limited to 'simulation')

 simulation/mle_blocks.py   |  6 ++++--
 simulation/utils_blocks.py |  5 ++++-
 simulation/vi_blocks.py    | 18 ++++++++++--------
 3 files changed, 18 insertions(+), 11 deletions(-)
diff --git a/simulation/mle_blocks.py b/simulation/mle_blocks.py
index 0d27869..98bc257 100644
--- a/simulation/mle_blocks.py
+++ b/simulation/mle_blocks.py
@@ -54,8 +54,10 @@ if __name__ == "__main__":
             bm.TrainingDataMonitoring([cost, params, rmse, error],
                                       every_n_batches=freq),
             be.Printing(every_n_batches=freq),
-            ub.JSONDump("logs/active_outdegree_mle.json", every_n_batches=freq),
-            ub.ActiveLearning(data_stream.dataset, graph, every_n_batches=freq),
+            ub.JSONDump("logs/active_estoutdegree_mle.json",
+                        every_n_batches=freq),
+            ub.ActiveLearning(data_stream.dataset, params,
+                              every_n_batches=freq),
         ],
     )
     loop.run()
diff --git a/simulation/utils_blocks.py b/simulation/utils_blocks.py
index 72a6881..00b429e 100644
--- a/simulation/utils_blocks.py
+++ b/simulation/utils_blocks.py
@@ -37,7 +37,10 @@ class ActiveLearning(be.SimpleExtension):
         self.params = params

     def do(self, which_callback, *args):
-        exp_out_par = np.exp(np.sum(self.params, axis=1))
+        try:
+            exp_out_par = np.exp(np.sum(self.params.get_value(), axis=1))
+        except AttributeError:
+            exp_out_par = np.exp(np.sum(self.params, axis=1))
         self.dataset.node_p = exp_out_par / np.sum(exp_out_par)
diff --git a/simulation/vi_blocks.py b/simulation/vi_blocks.py
index 5deb6f6..50c7fb1 100644
--- a/simulation/vi_blocks.py
+++ b/simulation/vi_blocks.py
@@ -52,16 +52,17 @@ def create_vi_model(n_nodes, n_samp=100):

 if __name__ == "__main__":
     batch_size = 100
-    frequency = 10
+    freq = 10
+    graph = utils.create_wheel(1000)
     n_samples = 50
-    graph = utils.create_random_graph(n_nodes=10)
+    #graph = utils.create_random_graph(n_nodes=10)
     print('GRAPH:\n', graph, '\n-------------\n')

     x, s, mu, sig, cost = create_vi_model(len(graph), n_samples)
     rmse = ub.rmse_error(graph, mu)

     step_rules = algorithms.CompositeRule([algorithms.AdaDelta(),
-                                           ClippedParams(1e-3, 1 - 1e-3)])
+                                           ClippedParams(1e-3, 1000)])
     alg = algorithms.GradientDescent(cost=cost, parameters=[mu, sig],
                                      step_rule=step_rules)
@@ -70,11 +71,12 @@ if __name__ == "__main__":
         alg, data_stream, log_backend="sqlite",
         extensions=[
-            be.FinishAfter(after_n_batches=10**4),
-            bm.TrainingDataMonitoring([cost, rmse, mu], every_n_batches=frequency),
-            be.Printing(every_n_batches=frequency, after_epoch=False),
-            ub.JSONDump("logs/tmp.json", every_n_batches=10),
-            #ub.ActiveLearning(dataset=data_stream.dataset, params=graph)
+            be.FinishAfter(after_n_batches=10**3),
+            bm.TrainingDataMonitoring([cost, rmse, mu], every_n_batches=freq),
+            be.Printing(every_n_batches=freq, after_epoch=False),
+            ub.JSONDump("logs/nonactive_vi.json", every_n_batches=freq),
+            #ub.ActiveLearning(dataset=data_stream.dataset, params=graph,
+            #every_n_batches=freq)
         ]
     )
     loop.run()
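Note on the utils_blocks.py hunk: the new try/except lets ActiveLearning.do accept either a Theano shared variable (whose data sits behind .get_value()) or a plain numpy array, so the extension works in both the MLE and VI scripts. A minimal standalone sketch of the same guard; the helper name node_probabilities is assumed for illustration and is not part of this repository:

    import numpy as np

    def node_probabilities(params):
        # Accept either a Theano shared variable or a plain ndarray:
        # shared variables expose their data via .get_value(), while
        # ndarrays raise AttributeError and are used directly.
        try:
            values = params.get_value()
        except AttributeError:
            values = params
        # Softmax over nodes: exponentiate the summed out-degree
        # parameters, then normalise into sampling probabilities.
        exp_out_par = np.exp(np.sum(values, axis=1))
        return exp_out_par / np.sum(exp_out_par)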
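Note on the vi_blocks.py hunk: ClippedParams is a project-local step rule whose definition is not shown in this diff; widening its bounds from (1e-3, 1 - 1e-3) to (1e-3, 1000) suggests the variational parameters were previously being pinned inside the unit interval. A sketch of how such a rule could be written against the blocks.algorithms.StepRule interface, assuming it clips the post-step parameter values; the repository's actual implementation may differ:

    from theano import tensor
    from blocks.algorithms import StepRule

    class ClippedParams(StepRule):
        """Keep each parameter inside [low, high] after the update."""
        def __init__(self, low, high):
            self.low = low
            self.high = high

        def compute_step(self, parameter, previous_step):
            # The would-be new value is parameter - previous_step;
            # clip it into [low, high] and re-express the result as
            # a step, so the rule composes inside a CompositeRule.
            clipped = tensor.clip(parameter - previous_step,
                                  self.low, self.high)
            return parameter - clipped, []

Placed after AdaDelta() in the CompositeRule, as in the diff, such a rule receives the adapted step and keeps mu and sig within range.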
