Diffstat (limited to 'simulation/utils_blocks.py')
| Mode | File | Lines |
|---|---|---|
| -rw-r--r-- | simulation/utils_blocks.py | 35 |
1 file changed, 34 insertions(+), 1 deletion(-)
diff --git a/simulation/utils_blocks.py b/simulation/utils_blocks.py
index 5e91658..0d30786 100644
--- a/simulation/utils_blocks.py
+++ b/simulation/utils_blocks.py
@@ -38,7 +38,9 @@ class ActiveLearning(be.SimpleExtension):
     def do(self, which_callback, *args):
         out_degree = np.sum(self.dataset.graph, axis=1)
         self.dataset.node_p = out_degree / np.sum(out_degree)
-        print(self.dataset.node_p)
+
+# def do(self, which_callback, *args):
+
 
 
 class JSONDump(be.SimpleExtension):
@@ -119,3 +121,34 @@ def dynamic_data_stream(graph, batch_size):
     data_set = LearnedDataset(node_p, graph)
     scheme = fuel.schemes.ConstantScheme(batch_size)
     return fuel.streams.DataStream(dataset=data_set, iteration_scheme=scheme)
+
+
+if __name__ == "__main__":
+    batch_size = 100
+    n_obs = 1000
+    frequency = 1
+    graph = utils.create_wheel(1000)
+    print('GRAPH:\n', graph, '\n-------------\n')
+
+    g_shared = theano.shared(value=graph, name='graph')
+    x, s, params, cost = create_mle_model(graph)
+    rmse = rmse_error(g_shared, params)
+    error = relative_error(g_shared, params)
+
+    alg = algorithms.GradientDescent(
+        cost=-cost, parameters=[params], step_rule=blocks.algorithms.AdaDelta()
+    )
+    data_stream = create_learned_data_stream(graph, batch_size)
+    #data_stream = create_fixed_data_stream(n_obs, graph, batch_size)
+    loop = main_loop.MainLoop(
+        alg, data_stream,
+        extensions=[
+            be.FinishAfter(after_n_batches=10**4),
+            bm.TrainingDataMonitoring([cost, rmse, error],
+                                      every_n_batches=frequency),
+            be.Printing(every_n_batches=frequency),
+            JSONDump("tmpactive_log.json", every_n_batches=frequency),
+            ActiveLearning(data_stream.dataset, every_n_batches=frequency)
+        ],
+    )
+    loop.run()
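
For context when reading this change outside the repository: the hunk at line 38 edits the `do` callback of the `ActiveLearning` extension, and the new `__main__` block registers that extension with the Blocks `MainLoop`. Below is a minimal sketch of how such a Blocks `SimpleExtension` is typically wired; only the `do` body comes from the context lines in the hunk above, while the constructor and docstring are assumptions added for illustration.

```python
import numpy as np
import blocks.extensions as be


class ActiveLearning(be.SimpleExtension):
    """Recompute node sampling probabilities from graph out-degrees.

    Sketch only: the constructor is an assumption; the ``do`` body mirrors
    the context lines shown in the hunk above.
    """

    def __init__(self, dataset, **kwargs):
        # SimpleExtension consumes trigger keywords such as
        # every_n_batches (as used when the extension is registered
        # in the new __main__ block above).
        super(ActiveLearning, self).__init__(**kwargs)
        self.dataset = dataset

    def do(self, which_callback, *args):
        # Weight each node by its out-degree and renormalise, so the
        # learned data stream samples high-degree nodes more often.
        out_degree = np.sum(self.dataset.graph, axis=1)
        self.dataset.node_p = out_degree / np.sum(out_degree)
```

The added `__main__` section passes `every_n_batches=frequency` when constructing this extension, so the sampling distribution is refreshed on the same schedule as the monitoring and printing extensions.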
