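"""Margin / capital-allocation stress estimates for cleared CDS index positions.

Positions and index quotes are pulled from the dawndb and serenitas databases;
spreads and curve slopes are shocked by historical one-month percentile moves
and the resulting P&L impact is reported per strategy.
"""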
from index_data import get_index_quotes, index_returns, _serenitas_engine
from analytics import Index, Portfolio
from db import dbengine, dbconn
from pandas.tseries.offsets import BDay, BMonthEnd
from copy import deepcopy
import numpy as np
import pandas as pd
import math
import datetime
import exploration.curve_trades as cv

dawndb = dbengine('dawndb')
serenitasdb = dbengine('serenitasdb')


def on_the_run(index):
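    """Return the most recent (on-the-run) series number for the given index family."""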
r = _serenitas_engine.execute("SELECT max(series) FROM index_version WHERE index=%s",
(index,))
series, = r.fetchone()
return series


def rel_spread_diff(report_date=None, index='HY', rolling=10):
    """Return one-month relative 5yr spread changes, the latest on-the-run 5yr
    spread and the on-the-run 5yr duration for `index`."""
    if report_date is None:
        report_date = datetime.date.today()
    otr = on_the_run(index)
    # look at 5yr spreads across the last `rolling` series
    df = get_index_quotes(index, list(range(otr - rolling, otr + 1)),
                          tenor=['3yr', '5yr', '7yr', '10yr'], years=5)
    duration = df.duration.xs((report_date, '5yr', otr),
                              level=['date', 'tenor', 'series']).iloc[-1]
    df = df.xs('5yr', level='tenor')['closespread'].groupby(['date', 'series']).last()
    df = df.loc['2013-01-15':report_date]
    curr_spread = df.iloc[-1]
    # 22-business-day (~1 month) relative spread changes per series
    df = df.unstack(-1).pct_change(freq='22B').stack()
    return df.groupby('date').last(), curr_spread, duration

def get_pos(report_date):
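    """Return CDS marks for all hedge / index strategies on report_date as one DataFrame."""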
strats = ["HEDGE_CSO", "HEDGE_MBS", "HEDGE_CLO", "HEDGE_MAC",
"SER_IGINX", "SER_HYINX", "SER_IGCURVE", "SER_ITRXCURVE",
"IGOPTDEL", "HYOPTDEL"]
r = {}
for st in strats:
r[st] = pd.read_sql_query("SELECT * from list_cds_marks(%s, %s)",
dawndb, params=(report_date, st))
return pd.concat(r, names=['strategy']).reset_index()


def cleared_cds_margins(report_date=None):
    """Delta capital allocation per strategy: worst of percentile spread widening / tightening shocks."""
    if report_date is None:
        report_date = datetime.date.today()
    df = get_pos(report_date)
    # capital allocation for deltas: monthly spread shock at the 95th percentile (one-tailed)
    percentile = .95
    shocks, widen, tighten, onTR_dur, onTR_spread = {}, {}, {}, {}, {}
for ind in ['IG', 'HY', 'EU']:
shocks[ind], onTR_spread[ind], onTR_dur[ind] = rel_spread_diff(report_date, index=ind)
widen[ind] = shocks[ind].quantile(percentile)
tighten[ind] = shocks[ind].quantile(1-percentile)
    # duration-scale each position to an on-the-run-equivalent notional, then P&L under the spread shocks
    df['onTR_notional'] = df.apply(
        lambda row: row.notional * row.factor * row.duration / onTR_dur[row.p_index], axis=1)
    df['widen'] = df.apply(
        lambda row: row.onTR_notional * onTR_spread[row.p_index] * onTR_dur[row.p_index]
        * widen[row.p_index] / 10000, axis=1)
    df['tighten'] = df.apply(
        lambda row: row.onTR_notional * onTR_spread[row.p_index] * onTR_dur[row.p_index]
        * tighten[row.p_index] / 10000, axis=1)
    delta_alloc = df.groupby('strategy').sum(numeric_only=True)
    # the delta charge is the worse of the widening and tightening scenarios
    delta_alloc['total'] = delta_alloc.apply(
        lambda row: max(abs(row.widen), abs(row.tighten)), axis=1)
    return delta_alloc


def index_curve_margins(report_date=None):
    """Curve risk: stress the whole curve flatter / steeper by historical
    percentiles of monthly slope changes and return the worst P&L impact."""
    if report_date is None:
        report_date = datetime.date.today()
    df = get_pos(report_date)
    # 22-business-day relative changes in curve slopes, rescaled to current slope levels
    diff = cv.curve_spread_diff()
    diff = diff.pct_change(freq='22B').dropna() * diff.iloc[-1]
    ctrades = df[df.strategy.isin(['SER_IGCURVE', 'SER_ITRXCURVE'])]
    trades = []
    for _, t in ctrades.iterrows():
        trade = Index.from_name(t.p_index, t.p_series, t.tenor,
                                report_date, abs(t.notional))
        trade.spread = t.closespread
        trade.direction = 'Seller' if t.notional > 0 else 'Buyer'
        trades.append(trade)
    portf = Portfolio(trades)
    # on-the-run slope moves are applied to every series here as a first approximation
    pv_diff = []
    # flattening / steepening scenarios at the 10th and 90th percentile slope moves
    for stress in [diff.quantile(.1), diff.quantile(.9)]:
portf1 = deepcopy(portf)
for trade in portf1.indices:
tenor = trade.name.split()[4]
if tenor == '3Y':
trade.spread -= stress.loc['3-5']
elif tenor == '10Y':
trade.spread += stress.loc['5-10']
pv_diff.append(portf1.pv - portf.pv)
    # worst-case scenario P&L, sign-flipped so a loss shows up as a positive margin
    return -min(pv_diff)
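

if __name__ == '__main__':
    # Illustrative usage sketch: assumes the dawndb / serenitas connections above
    # are reachable and that the chosen date has marks loaded. The date below is
    # only an example (taken from the original development note); any report date
    # with positions works.
    asof = datetime.date(2017, 10, 31)
    print(cleared_cds_margins(asof))
    print(index_curve_margins(asof))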