summaryrefslogtreecommitdiffstats
path: root/hawkes/main.py
diff options
context:
space:
mode:
Diffstat (limited to 'hawkes/main.py')
-rw-r--r--hawkes/main.py133
1 files changed, 116 insertions, 17 deletions
diff --git a/hawkes/main.py b/hawkes/main.py
index 65586ad..d98f6a0 100644
--- a/hawkes/main.py
+++ b/hawkes/main.py
@@ -1,8 +1,31 @@
from cPickle import load
-from math import log, exp
-#import numpy as np
-#from scipy.optimize import basinhopping
+from math import log, exp, sqrt
+from random import random, gauss
+import sys
from itertools import product
+from multiprocessing import Pool
+# import numpy as np
+# from scipy.optimize import basinhopping
+
# Inverse golden ratio: the probe-point placement constant for
# golden-section search.
GR = (sqrt(5) - 1) / 2


def gss(f, a, b, tol=1e-20):
    """Golden-section search: minimize f along one coordinate on [a, b].

    f   -- unimodal single-argument objective
    a,b -- bracket endpoints
    tol -- stop when the two interior probe points are closer than this
           (the default is below double precision, so in practice the
           loop runs until the probes collapse to the same float)

    Returns the midpoint of the final bracket.

    Fix over the previous version: each iteration now evaluates f only
    at the ONE new probe point, reusing the surviving evaluation, which
    is the whole point of golden-section search (the old code called f
    twice per iteration).
    """
    c = b - GR * (b - a)
    d = a + GR * (b - a)
    fc = f(c)
    fd = f(d)
    while abs(c - d) > tol:
        if fc < fd:
            # minimum lies in [a, d]; old c becomes the new right probe
            b = d
            d, fd = c, fc
            c = b - GR * (b - a)
            fc = f(c)
        else:
            # minimum lies in [c, b]; old d becomes the new left probe
            a = c
            c, fc = d, fd
            d = a + GR * (b - a)
            fd = f(d)
    return (b + a) / 2
def iter_events(events):
@@ -11,9 +34,7 @@ def iter_events(events):
yield (n, t)
-def ll(a):
- lamb, alpha, mu = a
-
+def ll_old(lamb, alpha, mu):
r1 = sum(log(lamb + sum(alpha * w * mu * exp(-mu * (t1 - t2))
for (n2, t2, w) in s))
for ((n1, t1), s) in event_edges.iteritems())
@@ -24,20 +45,98 @@ def ll(a):
return -(r1 - r2 - r3)
-if __name__ == "__main__":
- nodes, edges, events, event_edges = load(open("data.pickle", "rb"))
def ll(lamb, alpha, mu):
    """Negated log-likelihood of the Hawkes process (for minimizers).

    lamb  -- background event rate
    alpha -- excitation weight, scaled down by the edge value d
    mu    -- exponential decay rate of the excitation kernel

    Reads the module globals event_edges, edges, nodes and events.
    """
    # Log-intensity at each observed event: base rate plus the decayed
    # contribution of every earlier event on an incident edge.
    r1 = 0
    for (n1, t1), s in event_edges.iteritems():
        excitation = 0
        for n2, t2, d in s:
            excitation += alpha / d * mu * exp(-mu * (t1 - t2))
        r1 += log(lamb + excitation)
    # Integrated excitation each event injects into its neighbours'
    # remaining observation windows.
    r2 = 0
    for n1, t1 in iter_events(events):
        r2 += sum(alpha / d * (1 - exp(-mu * (nodes[n2] - t1)))
                  for n2, d in edges[n1].iteritems() if nodes[n2] > t1)
    # Integrated background rate over all observation windows.
    r3 = lamb * sum(nodes.itervalues())
    return -(r1 - r2 - r3)
+
+
def sa(x, y, z, sigma=0.1, niter=100):
    """Adaptive simulated annealing over (y, z) with x held fixed.

    Proposes Gaussian perturbations of y and z (clamped at 0), always
    accepts downhill moves and accepts uphill moves with Metropolis
    probability exp((old - new) / T).  The step scale sigma grows by 1%
    on each acceptance and shrinks by 1% on each rejection; the loop
    exits early once sigma drops below 1e-5.  The temperature cools by
    a factor 0.99 per iteration.  Returns the final accepted (y, z).
    """
    temperature = 1
    growth = 1.01
    for _ in xrange(niter):
        current = ll(x, y, z)
        # Propose a non-negative neighbour; the 1e-10 floor keeps the
        # proposal noise alive even after y or z collapses to 0.
        y_new = max(y + gauss(0, sigma * y + 1e-10), 0)
        z_new = max(z + gauss(0, sigma * z + 1e-10), 0)
        proposed = ll(x, y_new, z_new)
        # NB: the `or` must short-circuit so random() is only drawn for
        # uphill proposals (keeps the RNG stream identical).
        if proposed < current or exp((current - proposed) / temperature) > random():
            y, z = y_new, z_new
            sigma *= growth
        else:
            sigma /= growth
            if sigma < 1e-5:
                break
        temperature *= 0.99
    return y, z
+
+
+def optimize_with_sa(x, y, z):
+ #y, z = sa(x, y, z)
+ while True:
+ x = gss(f, 0, 1, tol=1e-10)
+ y, z = sa(x, y, z)
+ print x, y, z, ll(x, y, z)
+ sys.stdout.flush()
+
+
+def optimize_with_gss(x, y, z):
+ while True:
+ x = gss(f, 0, 1, tol=1e-20)
+ y = gss(g, 0, 1, tol=1e-20)
+ z = gss(h, 0, 1, tol=1e-20)
+ print x, y, z, ll(x, y, z)
+ sys.stdout.flush()
+
+
+def coarse_search():
d = {}
for line in open("values.txt"):
v = map(float, line.strip().split())
d[tuple(v[:3])] = v[3]
+ p = Pool(5)
+ lamb = [20. ** i for i in range(-15, 15)]
+ alpha = [20. ** i for i in range(-15, 15)]
+ mu = [20. ** i for i in range(-15, 15)]
+
+ def aux(x):
+ if x in d:
+ return
+ l, a, m = x
+ print l, a, m, ll(l, a, m)
- # lamb = [20. ** i for i in range(-15, 15)]
- # alpha = [20. ** i for i in range(-15, 15)]
- # mu = [20. ** i for i in range(-15, 15)]
+ p.map(aux, product(lamb, alpha, mu))
# for l, a, m in product(lamb, alpha, mu):
- # if (l, a, m) in d:
- # continue
- # print l, a, m, ll((l, a, m))
- print ll((2, 10000000000., 0.000000000000000000001))
- #r = basinhopping(ll, init, disp=True, stepsize=0.1, T=10000., niter=500)
- #print r.x
+ # print l, a, m, ll(*(l, a, m))
+
+
if __name__ == "__main__":
    nodes, edges, events, event_edges = load(open("data2.pickle", "rb"))

    # One-dimensional slices of ll used by the gss-based drivers; each
    # fixes the other two coordinates at the module globals set below.
    def f(x):
        global y, z
        return ll(x, y, z)

    def g(y):
        global x, z
        return ll(x, y, z)

    def h(z):
        global x, y
        return ll(x, y, z)

    # Starting point.  Alternatives tried before:
    #   x, y, z = map(float, sys.argv[1:])
    #   x, y, z = 1.8387870804e-05, 0.0383954874502, 0.00108402386463
    x, y, z = 0.000125, 0.000125, 0.000125

    # Pick one driver:
    # optimize_with_sa(x, y, z)
    # optimize_with_gss(x, y, z)
    coarse_search()