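"""Run spread-shock scenarios for each fund's portfolio and store the results.

For a given business date (default: the previous business day), build each
fund's portfolio, reprice it under a grid of credit spread shocks plus the
2020/2022 HY wides and 2021 HY tights historical levels, and write the
aggregated pnl / hy_equiv numbers into the ``shocks`` table of ``dawndb``.
Pass ``-n`` / ``--no-upload`` to skip the database write.
"""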
import argparse
import datetime

import numpy as np
import pandas as pd

import serenitas.analytics as ana
from risk.portfolio import build_portfolio, generate_vol_surface
from serenitas.analytics.base import Trade
from serenitas.analytics.dates import prev_business_day
from serenitas.analytics.scenarios import run_portfolio_scenarios
from serenitas.utils.db2 import dbconn


def gen_shocks(portf, shock_date, fund):
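    """Shock the portfolio across a grid of credit spread moves as of shock_date.

    Returns a long-format DataFrame, one row per (date, scenario type,
    spread shock, strategy bucket), ready to be written to the shocks table.
    """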
    Trade.init_ontr(shock_date)
    ana._local = False
    ontr_spread = Trade._ontr["HY"].spread
    spread_shock = np.array([-25.0, 1.0, +25.0, 100.0, 200.0, 500, 1000])
    spread_shock /= ontr_spread
    # Add in 2020 HY Wides, 2021 HY Tights, 2022 HY Wides scenarios
    historic_spreads = np.array([872, 269, 626])
    spread_shock = np.append(spread_shock, historic_spreads / ontr_spread - 1.0)
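    # Build the vol surface (10-day lookback, BAML source) passed to the scenario run below.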
    vol_surface = generate_vol_surface(portf, lookback=10, source="BAML")
    portf.reset_pv()
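    # Reprice the portfolio across the spread-shock grid; vol and correlation are held flat.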
    scens = run_portfolio_scenarios(
        portf,
        date_range=[pd.Timestamp(shock_date)],
        params=["pnl", "hy_equiv"],
        spread_shock=spread_shock,
        vol_shock=[0.0],
        corr_shock=[0.0],
        vol_surface=vol_surface,
    )
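    # Bucket option and tranche strategy codes for reporting; unmapped codes keep their own name.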
    strategies = {
        s: "options"
        for s in ["HYOPTDEL", "HYPAYER", "HYREC", "IGOPTDEL", "IGPAYER", "IGREC"]
    } | {
        s: "tranches"
        for s in [
            "HYSNR",
            "HYMEZ",
            "HYINX",
            "HYEQY",
            "IGSNR",
            "IGMEZ",
            "IGINX",
            "IGEQY",
            "EUSNR",
            "EUMEZ",
            "EUINX",
            "EUEQY",
            "XOSNR",
            "XOMEZ",
            "XOINX",
            "XOEQY",
            "BSPK",
        ]
    }
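    # Slice out the flat vol/corr shock levels so only the spread-shock dimension remains.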
    if fund == "BRINKER":
        scens = scens.xs(0, level="corr_shock")
    else:
        scens = scens.xs((0.0, 0.0), level=["vol_shock", "corr_shock"])
    scens.columns.names = ["strategy", "trade_id", "scen_type"]
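    # Aggregate trade-level results up to strategy, then into the options/tranches buckets.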
    results = scens.stack(level="scen_type").reorder_levels([2, 0, 1]).sort_index()
    results = results.groupby(["strategy"], axis=1).sum()
    results = results.groupby(lambda s: strategies.get(s, s), axis=1).sum()
    # Map the relative shocks back to absolute spread differences
    results.index = results.index.set_levels(
        results.index.levels[2] * ontr_spread, level="spread_shock"
    )
    results["total"] = results.sum(axis=1)
    results = results.stack().reset_index()
    results.scen_type = results.scen_type.str.upper()
    # Put the date column first so row tuples match the column order of the INSERT in save_shocks
    results.insert(0, "date", results.pop("date"))
    return results


def save_shocks(date, df, fund):
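    """Replace the shocks rows for (fund, date) with the rows in df.

    Uses the module-level ``conn`` opened in the ``__main__`` block.
    """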
    with conn.cursor() as c:
        c.execute(
            "DELETE FROM shocks WHERE fund=%s AND date=%s",
            (fund, date),
        )
    conn.commit()
    with conn.cursor() as c:
        c.executemany(
            "INSERT INTO shocks VALUES (%s, %s, %s, %s, %s, %s)",
            [(*t, fund) for t in df.itertuples(index=False)],
        )
    conn.commit()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Shock data and insert into DB")
    parser.add_argument(
        "date",
        nargs="?",
        type=datetime.date.fromisoformat,
        default=prev_business_day(datetime.date.today()),
        help="run date (YYYY-MM-DD); defaults to the previous business day",
    )
    parser.add_argument("-n", "--no-upload", action="store_true", help="do not upload")
    args = parser.parse_args()
    conn = dbconn("dawndb")
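    # Build, shock and (unless --no-upload) store results for each fund on the run date.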
    for fund in ("SERCGMAST", "BOWDST", "ISOSEL", "BRINKER"):
        portf, _ = build_portfolio(args.date, args.date, fund)
        shocks = gen_shocks(portf, args.date, fund)
        if not args.no_upload:
            save_shocks(args.date, shocks, fund)