| author | Thibaut Horel <thibaut.horel@gmail.com> | 2015-09-12 16:37:44 -0400 |
|---|---|---|
| committer | Thibaut Horel <thibaut.horel@gmail.com> | 2015-09-12 16:37:44 -0400 |
| commit | bfae098bbc47b3948a2a794bee4e49ff32504d6b (patch) | |
| tree | fcd344ae14be1519acb2649157252e5fa0ae4aea | |
| parent | 717e7f2edda52100406d17826bd617b7915c5daf (diff) | |
| download | criminal_cascades-bfae098bbc47b3948a2a794bee4e49ff32504d6b.tar.gz | |
Incorporate the time-varying model for the background rate
| -rw-r--r-- | hawkes/data2.py | 10 |
| -rw-r--r-- | hawkes/main.py | 17 |
2 files changed, 17 insertions, 10 deletions
```diff
diff --git a/hawkes/data2.py b/hawkes/data2.py
index c091e7a..b8f83c9 100644
--- a/hawkes/data2.py
+++ b/hawkes/data2.py
@@ -2,6 +2,7 @@ from csv import DictReader
 import sys
 from itertools import product
 from cPickle import dump
+from math import cos
 
 MAX_TIME = 3012
 
@@ -10,13 +11,18 @@ def parse(s):
     return None if s == "NA" else int(float(s))
 
 
+def fluctuation_int(t):
+    if t is None:
+        t = MAX_TIME
+    return t + 0.43 / 0.0172 * (cos(4.36) - cos(0.0172 * t + 4.36))
+
+
 def load_nodes(filename):
     with open(filename) as fh:
         reader = DictReader(fh)
         d = {parse(row["name"]): parse(row["fatal_day"]) for row in reader}
         for n, t in d.iteritems():
-            if t is None:
-                d[n] = MAX_TIME
+            d[n] = fluctuation_int(t)
     return d
 
 
diff --git a/hawkes/main.py b/hawkes/main.py
index d98f6a0..a922812 100644
--- a/hawkes/main.py
+++ b/hawkes/main.py
@@ -1,11 +1,9 @@
 from cPickle import load
-from math import log, exp, sqrt
+from math import log, exp, sqrt, sin
 from random import random, gauss
 import sys
 from itertools import product
 from multiprocessing import Pool
-# import numpy as np
-# from scipy.optimize import basinhopping
 
 
 GR = (sqrt(5) - 1) / 2
@@ -46,8 +44,9 @@ def ll_old(lamb, alpha, mu):
 
 
 def ll(lamb, alpha, mu):
-    r1 = sum(log(lamb + sum(alpha / d * mu * exp(-mu * (t1 - t2))
-                            for (n2, t2, d) in s))
+    r1 = sum(log(lamb * (1 + 0.43 * sin(0.0172 * t1 + 4.36))
+                 + sum(alpha / d * mu * exp(-mu * (t1 - t2))
+                       for (n2, t2, d) in s))
              for ((n1, t1), s) in event_edges.iteritems())
     r2 = sum(sum(alpha / d * (1 - exp(-mu * (nodes[n2] - t1)))
                  for n2, d in edges[n1].iteritems() if nodes[n2] > t1)
@@ -78,7 +77,7 @@ def sa(x, y, z, sigma=0.1, niter=100):
 
 
 def optimize_with_sa(x, y, z):
-    #y, z = sa(x, y, z)
+    # y, z = sa(x, y, z)
     while True:
         x = gss(f, 0, 1, tol=1e-10)
         y, z = sa(x, y, z)
@@ -110,6 +109,7 @@ def coarse_search():
             return
         l, a, m = x
         print l, a, m, ll(l, a, m)
+        sys.stdout.flush()
     p.map(aux, product(lamb, alpha, mu))
 
     # for l, a, m in product(lamb, alpha, mu):
@@ -136,7 +136,8 @@ if __name__ == "__main__":
     y = 1.
     z = 1.
     #x, y, z = 1.8387870804e-05, 0.0383954874502, 0.00108402386463
-    x, y, z = 0.000125, 0.000125, 0.000125
+    x, y, z = 1.83e-5, 0.00125, 0.00125
     #optimize_with_sa(x, y, z)
     #optimize_with_gss(x, y, z)
-    coarse_search()
+    print ll(x, y, z)
+    #coarse_search()
```
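For context: after this change the background rate in `ll()` is no longer the constant `lamb` but `lamb * (1 + 0.43 * sin(0.0172 * t + 4.36))`. The frequency 0.0172 is roughly 2π/365, so this presumably models a yearly fluctuation with time measured in days. `fluctuation_int(t)` in `data2.py` is the closed-form integral of that modulation factor over [0, t], which `load_nodes` now stores per node in place of the raw (or censored) event day. Below is a minimal standalone sketch of that relationship, checking the closed form against a crude midpoint rule; the names `modulation`, `midpoint`, `A`, `OMEGA`, and `PHI` are illustrative only, with just the numerical constants taken from the diff.

```python
from math import sin, cos

A, OMEGA, PHI = 0.43, 0.0172, 4.36  # amplitude, frequency, phase used in the commit


def modulation(t):
    # multiplicative factor applied to the constant background rate lamb
    return 1 + A * sin(OMEGA * t + PHI)


def fluctuation_int(t):
    # closed-form integral of modulation over [0, t]:
    #   t + (A / OMEGA) * (cos(PHI) - cos(OMEGA * t + PHI))
    return t + A / OMEGA * (cos(PHI) - cos(OMEGA * t + PHI))


def midpoint(f, t, n=100000):
    # crude numerical integral of f over [0, t], for a sanity check only
    h = t / float(n)
    return h * sum(f((i + 0.5) * h) for i in range(n))


if __name__ == "__main__":
    T = 3012  # MAX_TIME in data2.py
    print("closed form:   %.4f" % fluctuation_int(T))
    print("midpoint rule: %.4f" % midpoint(modulation, T))
```

The same sinusoid multiplies `lamb` inside the `log` term of `ll()` (the intensity evaluated at event times), while the transformed end times stored by `load_nodes` presumably enter the `lamb` part of the compensator elsewhere in `main.py`; that part of the likelihood is not shown in these hunks.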
