path: root/python/thetas-durations.py
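"""Compute risky durations and one-year thetas for CDS index quotes
(IG, HY, EU and XO, series 18 through 32) and insert them into the
index_risk table."""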
import datetime
import numpy as np
import pandas as pd
from analytics.utils import tenor_t
from pandas.tseries.offsets import BDay
from dateutil.relativedelta import relativedelta
from yieldcurve import get_curve
from utils.db import serenitas_engine
from pyisda.legs import FeeLeg, ContingentLeg
from pyisda.curve import SpreadCurve
from pyisda.date import previous_twentieth

tenors = {"IG": ("3yr", "5yr", "7yr", "10yr"),
          "HY": ("3yr", "5yr", "7yr"),
          "EU": ("3yr", "5yr", "7yr", "10yr"),
          "XO": ("3yr", "5yr", "7yr", "10yr")}

sql_str = "INSERT INTO index_risk VALUES(%s, %s, %s)"

def get_legs(index, series, tenors):
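    """Build pricing legs for each tenor of an index series.

    For every tenor, return a pair of FeeLeg and ContingentLeg objects,
    one at the full maturity and one shortened by a year (used for the
    theta calculation), together with the index_maturity frame with
    coupons rescaled from basis points to decimals and maturities as
    integer day counts.
    """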
    fee_legs = {}
    contingent_legs = {}
    df = pd.read_sql_query("SELECT tenor, maturity, coupon, issue_date "
                           "FROM index_maturity "
                           "WHERE index=%s AND series=%s and tenor IN %s "
                           "ORDER BY maturity",
                           serenitas_engine,
                           params=(index, series, tenors),
                           parse_dates=['maturity', 'issue_date'])
    df.coupon *= 1e-4  # coupons are stored in basis points
    for tenor, maturity, coupon, issue_date in df.itertuples(index=False):
        maturity_short = maturity - relativedelta(years=1)
        fee_legs[tenor] = (FeeLeg(issue_date, maturity, True, 1., 1.),
                           FeeLeg(issue_date, maturity_short,
                                  True, 1., coupon))
        contingent_legs[tenor] = (ContingentLeg(issue_date, maturity, True),
                                  ContingentLeg(issue_date, maturity_short, True))
    # convert maturities from datetime64 to integer day counts: days since
    # the Unix epoch plus 134774, the number of days between 1601-01-01 and
    # 1970-01-01 (the date origin used by the pyisda curve code)
    df.maturity = (df.maturity.values.view("M8[D]").astype(int) + 134774)
    return fee_legs, contingent_legs, df

def index_pv(fl, cl, value_date, step_in_date, cash_settle_date, yc, sc, recovery):
    # protection-buyer PV: contingent (default) leg minus fee (coupon) leg
    prot_pv = cl.pv(value_date, step_in_date, cash_settle_date, yc, sc, recovery)
    fee_pv = fl.pv(value_date, step_in_date, cash_settle_date, yc, sc, True)
    return prot_pv - fee_pv

if __name__ == "__main__":
    conn = serenitas_engine.raw_connection()
    for index in ["IG", "HY", "EU", "XO"]:
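        # recovery assumption: 30% for HY and XO, 40% for IG and EU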
        if index in ["HY", "XO"]:
            recoveries = np.full(len(tenors[index]), 0.3)
        else:
            recoveries = np.full(len(tenors[index]), 0.4)
        for series in range(18, 33):
            if index in ["EU", "XO"] and series == 32:
                continue
            fee_legs, contingent_legs, df = \
                get_legs(index, series, tenors[index])

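            # latest MKIT close per (date, tenor) that does not yet have a
            # duration in index_risk (DISTINCT ON with ORDER BY version DESC)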
            index_quotes = pd.read_sql_query(
                "SELECT distinct on (date, tenor) id, date, tenor, version, close_price "
                "FROM index_quotes_pre "
                "LEFT JOIN index_risk USING (id) "
                "WHERE index=%s AND series=%s "
                "AND source='MKIT' AND duration is NULL AND tenor IN %s "
                "ORDER BY date, tenor, version desc",
                serenitas_engine,
                params=(index, series, tenors[index]),
                parse_dates=['date'],
                index_col='id')
            if index_quotes.empty:
                continue
            index_quotes.tenor = index_quotes.tenor.astype(tenor_t)
            index_quotes = index_quotes.sort_values('tenor')
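            # convert the quoted price (per 100 notional) to an upfront
            # expressed as a fraction of notional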
            index_quotes['close_price'] = 1. - index_quotes['close_price'] / 100
            with conn.cursor() as c:
                for value_date, data in index_quotes.groupby('date'):
                    yc = get_curve(value_date, "USD" if index in ["IG", "HY"] else "EUR")
                    # join maturity/coupon info by tenor via reset_index/
                    # set_index so the quote id (r.Index below) is preserved;
                    # pandas rejects on= combined with right_index=True
                    data = data.reset_index().merge(df, on='tenor').set_index('id')
                    step_in_date = value_date + datetime.timedelta(days=1)
                    cash_settle_date = value_date + 3 * BDay()
                    start_date = previous_twentieth(value_date)
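                    # bootstrap the spread (survival) curve from the upfront
                    # quotes across tenors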
                    sc = SpreadCurve(value_date, yc, start_date,
                                     step_in_date, cash_settle_date,
                                     data.maturity.values, data.coupon.values,
                                     data.close_price.values,
                                     recoveries)
                    for r in data[['coupon', 'tenor', 'close_price']].itertuples():
                        fl, fl_short = fee_legs[r.tenor]
                        cl, cl_short = contingent_legs[r.tenor]

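                        # the full-maturity fee leg carries a unit coupon, so
                        # its PV is the risky annuity (duration)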
                        duration = fl.pv(value_date, step_in_date,
                                         cash_settle_date, yc, sc,
                                         True)
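                        # one-year theta: current upfront minus the model
                        # upfront of the index aged by one year on an
                        # unchanged curve, plus a year of coupon (carry)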
                        if cl_short.end_date <= value_date.date():
                            theta = None
                        else:
                            pv = index_pv(fl_short, cl_short,
                                          value_date, step_in_date, cash_settle_date, yc, sc,
                                          recoveries[0])
                            theta = r.close_price - pv + r.coupon

                        c.execute("INSERT INTO index_risk VALUES(%s, %s, %s)",
                                  (r.Index, theta, duration))
            conn.commit()
    conn.close()