Diffstat (limited to 'simulation')
 simulation/mle_blocks.py   |  4 ++--
 simulation/utils_blocks.py |  8 ++++----
 simulation/vi_blocks.py    | 15 +++++++++------
 3 files changed, 15 insertions(+), 12 deletions(-)
diff --git a/simulation/mle_blocks.py b/simulation/mle_blocks.py
index 98bc257..1b1cf4d 100644
--- a/simulation/mle_blocks.py
+++ b/simulation/mle_blocks.py
@@ -32,14 +32,14 @@ if __name__ == "__main__":
batch_size = 100
#n_obs = 100000
freq = 10
- graph = utils.create_wheel(1000)
+ graph = utils.create_wheel(100)
print('GRAPH:\n', graph, '\n-------------\n')
g_shared = theano.shared(value=graph, name='graph')
x, s, params, cost = create_mle_model(graph)
rmse = ub.rmse_error(g_shared, params)
- error = ub.relative_error(g_shared, params)
+ error = ub.absolute_error(g_shared, params)
alg = algorithms.GradientDescent(
cost=-cost, parameters=[params], step_rule=algorithms.AdaDelta()
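utils.create_wheel is not part of this diff; a plausible standalone sketch, assuming it returns the dense adjacency matrix of a standard wheel graph (node 0 as hub, nodes 1..n-1 forming a rim cycle), looks like this:

import numpy as np

def create_wheel(n_nodes):
    # Hypothetical reconstruction -- an assumption, not the repo's actual code.
    adj = np.zeros((n_nodes, n_nodes), dtype='int8')
    adj[0, 1:] = adj[1:, 0] = 1              # spokes: hub connects to every rim node
    for i in range(1, n_nodes):
        j = i + 1 if i + 1 < n_nodes else 1  # close the rim cycle
        adj[i, j] = adj[j, i] = 1
    return adj

print(create_wheel(5))  # hub plus a 4-cycle rim

Dropping the node count from 1000 to 100 shrinks the parameter matrix from 10**6 to 10**4 entries, which makes each AdaDelta step far cheaper.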
diff --git a/simulation/utils_blocks.py b/simulation/utils_blocks.py
index 00b429e..2dc9f85 100644
--- a/simulation/utils_blocks.py
+++ b/simulation/utils_blocks.py
@@ -85,18 +85,18 @@ def rmse_error(graph, params):
diff = (graph - params) ** 2
subarray = tsr.arange(n_nodes)
tsr.set_subtensor(diff[subarray, subarray], 0)
- rmse = tsr.sum(diff) / (n_nodes ** 2)
+ rmse = tsr.sqrt(tsr.sum(diff) / (n_nodes ** 2))
rmse.name = 'rmse'
return rmse

-def relative_error(graph, params):
+def absolute_error(graph, params):
n_nodes = graph.shape[0]
diff = abs(graph - params)
subarray = tsr.arange(n_nodes)
tsr.set_subtensor(diff[subarray, subarray], 0)
- error = tsr.sum(tsr.switch(tsr.eq(graph, 0.), 0., diff / graph)) / n_nodes
- error.name = 'rel_error'
+ error = tsr.sum(diff) / (n_nodes ** 2)
+ error.name = 'abs_error'
return error
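For reference, the two corrected metrics map onto this standalone NumPy sketch (illustrative only; the repo builds them symbolically with theano.tensor). One caveat worth flagging: tsr.set_subtensor returns a new variable rather than mutating in place, so the symbolic code would normally reassign its result, e.g. diff = tsr.set_subtensor(diff[subarray, subarray], 0); the sketch below zeroes the diagonal explicitly.

import numpy as np

def rmse_error(graph, params):
    # Root-mean-square error with self-loop (diagonal) entries excluded.
    n_nodes = graph.shape[0]
    diff = (graph - params) ** 2
    np.fill_diagonal(diff, 0)
    return np.sqrt(diff.sum() / n_nodes ** 2)

def absolute_error(graph, params):
    # Mean absolute error with self-loop (diagonal) entries excluded.
    n_nodes = graph.shape[0]
    diff = np.abs(graph - params)
    np.fill_diagonal(diff, 0)
    return diff.sum() / n_nodes ** 2

Unlike the old relative metric, absolute_error avoids the entrywise division by graph (zero on every non-edge) and normalizes by n_nodes ** 2 rather than n_nodes.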
diff --git a/simulation/vi_blocks.py b/simulation/vi_blocks.py
index 50c7fb1..e2e3bd9 100644
--- a/simulation/vi_blocks.py
+++ b/simulation/vi_blocks.py
@@ -26,7 +26,7 @@ def create_vi_model(n_nodes, n_samp=100):
"""return variational inference theano computation graph"""
def aux(a, b):
rand = a + b * np.random.normal(size=(n_nodes, n_nodes))
- return np.clip(rand, 1e-3, 1 - 1e-3).astype(theano.config.floatX)
+ return np.clip(rand, 1e-10, 1000).astype(theano.config.floatX)

x = tsr.matrix(name='x', dtype='int8')
s = tsr.matrix(name='s', dtype='int8')
@@ -54,12 +54,14 @@ if __name__ == "__main__":
batch_size = 100
freq = 10
graph = utils.create_wheel(1000)
+ g_shared = theano.shared(value=graph, name='graph')
n_samples = 50
- #graph = utils.create_random_graph(n_nodes=10)
+ # graph = utils.create_random_graph(n_nodes=10)
print('GRAPH:\n', graph, '\n-------------\n')
x, s, mu, sig, cost = create_vi_model(len(graph), n_samples)
- rmse = ub.rmse_error(graph, mu)
+ rmse = ub.rmse_error(g_shared, mu)
+ error = ub.absolute_error(g_shared, mu)
step_rules = algorithms.CompositeRule([algorithms.AdaDelta(),
ClippedParams(1e-3, 1000)])
@@ -72,11 +74,12 @@ if __name__ == "__main__":
log_backend="sqlite",
extensions=[
be.FinishAfter(after_n_batches=10**3),
- bm.TrainingDataMonitoring([cost, rmse, mu], every_n_batches=freq),
+ bm.TrainingDataMonitoring([cost, rmse, mu, error],
+ every_n_batches=freq),
be.Printing(every_n_batches=freq, after_epoch=False),
ub.JSONDump("logs/nonactive_vi.json", every_n_batches=freq),
- #ub.ActiveLearning(dataset=data_stream.dataset, params=graph,
- #every_n_batches=freq)
+ # ub.ActiveLearning(dataset=data_stream.dataset, params=graph,
+ # every_n_batches=freq)
]
)
loop.run()
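A closing note on the g_shared wrapper introduced for vi_blocks.py: rmse_error and absolute_error build Theano expressions, so the ground-truth matrix has to enter as a Theano variable rather than a raw NumPy array. A minimal sketch of the pattern, assuming Theano is installed and substituting a toy 4-node complete graph for the wheel graph:

import numpy as np
import theano
import theano.tensor as tsr

graph = (np.ones((4, 4)) - np.eye(4)).astype(theano.config.floatX)  # K4 adjacency
g_shared = theano.shared(value=graph, name='graph')  # symbolic wrapper
params = theano.shared(np.zeros((4, 4), dtype=theano.config.floatX),
                       name='params')                # stand-in for the model's mu
diff = abs(g_shared - params)                        # symbolic elementwise error
error = tsr.sum(diff) / (graph.shape[0] ** 2)
print(theano.function([], error)())                  # 12 ones over 16 entries -> 0.75

Relatedly, the widened clip in aux keeps reparameterized samples merely positive (1e-10 to 1000) instead of confining them to the open unit interval, in line with the ClippedParams(1e-3, 1000) rule already applied to the parameters.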