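"""Back-populate risky annuities for CDS index quotes.

For each index family (IG, HY, EU, XO) and series, load Markit quotes that
have no duration recorded in index_risk, bootstrap an ISDA spread curve per
quote date, and compute the risky annuity of each quoted tenor's fee leg.
"""
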
import datetime
import numpy as np
import pandas as pd
from analytics.utils import tenor_t
from pandas.tseries.offsets import BDay
from yieldcurve import get_curve
from db import dbengine
from pyisda.legs import FeeLeg, ContingentLeg
from pyisda.curve import SpreadCurve
from pyisda.date import previous_twentieth

serenitas_engine = dbengine('serenitasdb')

# Tenors quoted for each index family.
tenors = {"IG": ("3yr", "5yr", "7yr", "10yr"),
          "HY": ("3yr", "5yr", "7yr"),
          "EU": ("3yr", "5yr", "7yr", "10yr"),
          "XO": ("3yr", "5yr", "7yr", "10yr")}

sql_str = "INSERT INTO index_risk VALUES(%s, %s, %s)"
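

# Helper: build ISDA fee and contingent legs for each quoted tenor of an index
# series, together with the maturity/coupon frame used to bootstrap spread curves.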
def get_legs(index, series, tenors):
    fee_legs = {}
    contingent_legs = {}
    df = pd.read_sql_query("SELECT tenor, maturity, coupon, issue_date "
                           "FROM index_maturity "
                           "WHERE index=%s AND series=%s AND tenor IN %s "
                           "ORDER BY maturity",
                           serenitas_engine,
                           params=(index, series, tenors),
                           parse_dates=['maturity', 'issue_date'])
    # Coupons are stored in basis points; convert once to decimal fractions.
    df.coupon *= 1e-4
    for tenor, maturity, coupon, issue_date in df.itertuples(index=False):
        fee_legs[tenor] = FeeLeg(issue_date, maturity, True, 1., coupon)
        contingent_legs[tenor] = ContingentLeg(issue_date, maturity, True)
    # Convert maturities from nanoseconds since the Unix epoch to whole days,
    df.maturity = df.maturity.view('int64') // int(86400 * 1e9)
    # then add the 134774 days between 1601-01-01 and 1970-01-01 to put them on
    # the ISDA day-count base.
    df.maturity += 134774
    return fee_legs, contingent_legs, df
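

# Helper: bootstrap an ISDA spread curve for one value date, using a T+1
# step-in date, T+3 business-day cash settlement, and the previous CDS roll
# date (the twentieth) as the accrual start.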
def credit_curve(value_date, quotes, end_dates, coupons, recoveries, yc):
    step_in_date = value_date + datetime.timedelta(days=1)
    cash_settle_date = pd.Timestamp(value_date) + 3 * BDay()
    start_date = previous_twentieth(value_date)
    sc = SpreadCurve(value_date, yc, start_date,
                     step_in_date, cash_settle_date,
                     end_dates, coupons, quotes,
                     recoveries)
    return sc
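

# Main loop: for each index family and series, pull Markit ("MKIT") quotes that
# still lack a duration in index_risk, bootstrap a spread curve per quote date
# (30% recovery for HY/XO, 40% for IG/EU; USD discounting for IG/HY, EUR for
# EU/XO), and compute each tenor's risky annuity.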
conn = serenitas_engine.raw_connection()
for index in ["IG", "HY", "EU", "XO"]:
    if index in ["HY", "XO"]:
        recoveries = np.full(len(tenors[index]), 0.3)
    else:
        recoveries = np.full(len(tenors[index]), 0.4)
    for series in range(18, 31):
        if index in ["EU", "XO"] and series == 30:
            continue
        fee_legs, contingent_legs, df = \
            get_legs(index, series, tenors[index])
        index_quotes = pd.read_sql_query(
            "SELECT id, date, tenor, close_price FROM index_quotes_pre "
            "LEFT JOIN index_risk USING (id) "
            "WHERE index=%s AND series=%s "
            "AND source='MKIT' AND duration IS NULL AND tenor IN %s",
            serenitas_engine,
            params=(index, series, tenors[index]),
            parse_dates=['date'],
            index_col='id')
        if index_quotes.empty:
            continue
        index_quotes.tenor = index_quotes.tenor.astype(tenor_t)
        index_quotes = index_quotes.sort_values('tenor')
        # Convert price quotes (percent of par) into 1 - price, as a fraction of par.
        index_quotes['close_price'] = 1. - index_quotes['close_price'] / 100
        with conn.cursor() as c:
            for k, v in index_quotes.groupby('date'):
                yc = get_curve(k, "USD" if index in ["IG", "HY"] else "EUR")
                v = v.merge(df, on='tenor')
                sc = credit_curve(k, v.close_price.values, v.maturity.values,
                                  v.coupon.values, recoveries, yc)
                # Step-in (T+1) and cash-settle (T+3 business days) dates for the PV call.
                step_in_date = k + datetime.timedelta(days=1)
                cash_settle_date = pd.Timestamp(k) + 3 * BDay()
                for t in v.tenor.values:
                    # "pv" is an assumed name for the fee-leg present-value method;
                    # it yields the risky annuity for this tenor. No write-back to
                    # index_risk (sql_str) is performed here.
                    risky_annuity = fee_legs[t].pv(k, step_in_date,
                                                   cash_settle_date, yc, sc,
                                                   False)
conn.commit()
conn.close()