about summary refs log tree commit diff stats
path: root/python/notebooks
diff options
context:
space:
mode:
Diffstat (limited to 'python/notebooks')
-rw-r--r--  python/notebooks/Reto Report.ipynb  270
1 file changed, 104 insertions, 166 deletions
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index f05d63b2..f3910b3b 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -16,13 +16,14 @@
"from analytics.index_data import get_index_quotes\n",
"from analytics.scenarios import run_portfolio_scenarios\n",
"from analytics.utils import run_local\n",
- "from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio,DualCorrTranche\n",
+ "from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, DualCorrTranche\n",
"from copy import deepcopy\n",
"from analytics.curve_trades import on_the_run\n",
- "from risk.tranches import get_tranche_portfolio\n",
- "from risk.swaptions import get_swaption_portfolio\n",
- "from risk.bonds import subprime_risk, clo_risk, crt_risk\n",
- "from utils.db import dbconn, dbengine, serenitas_engine, dawn_engine"
+ "from risk.bonds import subprime_risk\n",
+ "from utils.db import dbconn, dbengine, serenitas_engine, dawn_engine\n",
+ "from risk.portfolio import build_portf, generate_vol_surface\n",
+ "from analytics.tranche_basket import DualCorrTranche, TrancheBasket, MarkitTrancheBasket, Skew\n",
+ "from analytics.basket_index import BasketIndex"
]
},
{
@@ -32,13 +33,101 @@
"outputs": [],
"source": [
"#Set dates\n",
- "analytics._local = False\n",
"position_date = (datetime.date.today() - BDay(1)).date()\n",
"spread_date = position_date\n",
- "analytics.init_ontr(spread_date)\n",
- "date = datetime.date.today() - BDay(1)\n",
- "report_date = date - MonthEnd(1)\n",
- "report_date"
+ "analytics._local = False\n",
+ "analytics.init_ontr(spread_date)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Stress scenario for weekly report --> copy paste results to Excel\n",
+ "spread_shock = [100, 200]\n",
+ "spread_shock = [x / analytics._ontr['HY'].spread for x in spread_shock]\n",
+ "portf, _ = build_portf(position_date, spread_date)\n",
+ "vol_surface = generate_vol_surface(portf, 5)\n",
+ "\n",
+ "portf.reset_pv()\n",
+ "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=[\"pnl\"],\n",
+ " spread_shock=spread_shock,\n",
+ " vol_shock=[0, .3],\n",
+ " corr_shock = [0],\n",
+ " vol_surface=vol_surface)\n",
+ "\n",
+ "attrib = (scens.\n",
+ " reset_index(level=['date'], drop=True).\n",
+ " groupby(level=0, axis=1).sum())\n",
+ "results = attrib.xs((spread_shock[1], 0., 0), level=['spread_shock', 'corr_shock', 'vol_shock']).T\n",
+ "results.to_clipboard()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "################################### JTD\n",
+ "_, portf = build_portf(position_date, spread_date)\n",
+ "jtd_i = []\n",
+ "for t in portf.indices:\n",
+ " bkt = BasketIndex(t.index_type, t.series, [t.tenor])\n",
+ " spreads = pd.DataFrame(bkt.spreads() * 10000, index=pd.Index(bkt.tickers, name='ticker'), columns=['spread'])\n",
+ " jump = pd.merge(spreads, bkt.jump_to_default() * t.notional, left_index=True, right_index=True)\n",
+ " jtd_i.append(jump.rename(columns={jump.columns[1]: 'jtd'}))\n",
+ "jtd_t = []\n",
+ "for t in portf.tranches:\n",
+ " jump = pd.concat([t.singlename_spreads().reset_index(['seniority', 'doc_clause'], drop=True), t.jump_to_default().rename('jtd')], axis=1)\n",
+ " jtd_t.append(jump.drop(['weight', 'recovery'], axis=1))\n",
+ "\n",
+ "ref_names = pd.read_sql_query(\"select ticker, referenceentity from refentity\", dbconn('serenitasdb'), index_col='ticker')\n",
+ "jump = pd.concat([pd.concat(jtd_t), pd.concat(jtd_i)])\n",
+ "jump = jump.merge(ref_names, left_index=True, right_index=True)\n",
+ "jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "################################### Run set of scenario\n",
+ "spread_shock = [-100, -25, 1, +25 , 100]\n",
+ "spread_shock = [x / analytics._ontr['HY'].spread for x in spread_shock]\n",
+ "portf = build_portf(position_date, spread_date)\n",
+ "vol_surface = generate_vol_surface(portf, 5)\n",
+ "portf.reset_pv()\n",
+ "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl'],\n",
+ " spread_shock=spread_shock,\n",
+ " vol_shock=[0.0],\n",
+ " corr_shock=[0.0],\n",
+ " vol_surface=vol_surface)\n",
+ "\n",
+ "pnl = scens.xs('pnl', axis=1, level=2)\n",
+ "pnl = pnl.xs((vol_shock, corr_shock), level=['vol_shock', 'corr_shock'])\n",
+ "\n",
+ "scenarios = (pnl.\n",
+ " reset_index(level=['date'], drop=True).\n",
+ " groupby(level=0, axis=1).sum())\n",
+ "\n",
+ "options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']\n",
+ "tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'IGINX', 'BSPK', 'XOMEZ', 'XOINX', 'EUMEZ']\n",
+ "hedges = ['HEDGE_CLO', 'HEDGE_MAC', 'HEDGE_MBS']\n",
+ "\n",
+ "synthetic =pd.DataFrame()\n",
+ "synthetic['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)\n",
+ "synthetic['tranches'] = scenarios[set(scenarios.columns).intersection(tranches)].sum(axis=1)\n",
+ "synthetic['curve_trades'] = scenarios['curve_trades']\n",
+ "synthetic['total'] = synthetic.sum(axis = 1)\n",
+ "nav = go.get_net_navs()\n",
+ "#(synthetic/nav.endbooknav[-1])\n",
+ "scenarios.sum(axis=1)\n",
+ "scenarios.sum(axis=1).to_clipboard()"
]
},
{
@@ -59,8 +148,8 @@
"pnl_alloc = pnl_alloc.join(nav.begbooknav)\n",
"pnl_alloc['strat_return'] = pnl_alloc.mtdtotalbookpl / pnl_alloc.begbooknav\n",
"#rolling 12 months PNL per strategy - copy to RiskMonitor\n",
- "start_date = report_date - pd.tseries.offsets.MonthEnd(11)\n",
- "rolling_return = pnl_alloc[start_date:report_date].groupby('pnl').sum()['strat_return']\n",
+ "start_date = position_date - MonthEnd(1) - pd.tseries.offsets.MonthEnd(11)\n",
+ "rolling_return = pnl_alloc[start_date:position_date - MonthEnd(1)].groupby('pnl').sum()['strat_return']\n",
"rolling_return.to_clipboard()"
]
},
@@ -221,7 +310,6 @@
"outputs": [],
"source": [
"################################## Calculate Historical Bond Duration/Yield\n",
- "analytics.init_ontr()\n",
"mysql_engine = dbengine('rmbs_model')\n",
"dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"M\")\n",
"calc_df = pd.DataFrame()\n",
@@ -320,7 +408,6 @@
"outputs": [],
"source": [
"#Historical max widening\n",
- "analytics.init_ontr()\n",
"df = get_index_quotes('HY', list(range(on_the_run('HY', spread_date) - 10, on_the_run('HY', spread_date) + 1)),\n",
" tenor=['5yr'], years=5)\n",
"df = df.xs('5yr', level='tenor')['close_spread'].groupby(['date', 'series']).last()\n",
@@ -342,7 +429,7 @@
"outputs": [],
"source": [
"################################## Historical Notioinals and HY Equiv\n",
- "dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
+ "dates = pd.date_range(datetime.date(2013, 1, 30), datetime.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
"#look for a day with HY quotes... we need that to construct HY Equiv\n",
"sql_string = 'select distinct(date) from index_quotes where index = %s order by date asc'\n",
"hy_dates = pd.read_sql_query(sql_string, serenitas_engine, parse_dates = 'date', params=['HY',])\n",
@@ -361,163 +448,14 @@
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
- "#### Function to build portfolio\n",
- "def build_portf(position_date, spread_date=None):\n",
- " analytics.init_ontr()\n",
- " if spread_date is None:\n",
- " spread_date=position_date\n",
- " conn = dawn_engine.raw_connection()\n",
- " conn.autocommit = True\n",
- " \n",
- " mysql_engine = dbengine('rmbs_model')\n",
- " mysqlcrt_engine = dbengine('crt')\n",
- " \n",
- " on_the_run_index = analytics._ontr['HY']\n",
- " on_the_run_index.value_date = position_date\n",
- "\n",
- " #tranche positions\n",
- " portf = get_tranche_portfolio(position_date, conn, False, 'SERCGMAST')\n",
- " #swaption positions\n",
- " s_portf = get_swaption_portfolio(position_date, conn)\n",
- " if bool(s_portf):\n",
- " for t, id in zip(s_portf.trades, s_portf.trade_ids):\n",
- " portf.add_trade(t, id)\n",
- "\n",
- " #index positions\n",
- " df = pd.read_sql_query(\"SELECT * from list_cds_positions_by_strat(%s)\",\n",
- " dawn_engine, params=(position_date,))\n",
- " if not(df.empty):\n",
- " df_no_curve = df[~df.folder.str.contains(\"CURVE\")]\n",
- " for t in df_no_curve.itertuples(index=False):\n",
- " portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),\n",
- " (t.folder, t.security_desc))\n",
- "\n",
- " #separately add in curve delta\n",
- " df_curve = df[df.folder.str.contains(\"CURVE\")]\n",
- " curve_portf = Portfolio([CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional)\n",
- " for t in df_curve.itertuples(index=False)])\n",
- " curve_portf.value_date = spread_date\n",
- " curve_portf.mark()\n",
- "\n",
- " hyontr = deepcopy(on_the_run_index)\n",
- " hyontr.notional = curve_portf.hy_equiv\n",
- " portf.add_trade(hyontr, ('curve_trades', ''))\n",
- "\n",
- " #get bond risks:\n",
- " sql_string = (\"SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 \"\n",
- " \"and date(timestamp) <= %s and date(timestamp) >= %s order by timestamp desc\")\n",
- " with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n",
- " timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=[\"timestamp\"], \n",
- " params=[position_date, position_date - pd.tseries.offsets.DateOffset(15, \"D\")])\n",
- " rmbs_pos = subprime_risk(position_date, dawnconn, mysql_engine, timestamps.iloc[0][0].date())\n",
- " clo_pos = clo_risk(position_date, dawnconn, etconn)\n",
- " crt_pos = crt_risk(position_date, dawnconn, mysqlcrt_engine, model_version = 'hpi5_ir3_btm')\n",
- " rmbs_notional = 0\n",
- " for pos in [rmbs_pos, crt_pos]:\n",
- " rmbs_notional += pos['hy_equiv'].sum() if pos is not None else 0\n",
- " hyontr_rmbs = deepcopy(on_the_run_index)\n",
- " hyontr_rmbs.notional = -rmbs_notional\n",
- " portf.add_trade(hyontr_rmbs, ('rmbs_bonds', ''))\n",
- " if isinstance(clo_pos, pd.DataFrame):\n",
- " hyontr_clos = deepcopy(on_the_run_index)\n",
- " hyontr_clos.notional = -clo_pos['hy_equiv'].sum()\n",
- " portf.add_trade(hyontr_clos, ('clo_bonds', ''))\n",
- " \n",
- " portf.value_date = spread_date\n",
- " portf.mark(interp_method=\"bivariate_linear\")\n",
- " portf.reset_pv()\n",
- " \n",
- " return portf"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "#tranche/swaption positions\n",
- "curr_spread = analytics._ontr['HY'].spread\n",
- "spread_shock = [100, 200]\n",
- "spread_shock = [x / curr_spread for x in spread_shock]\n",
- "portf = build_portf(position_date, spread_date)\n",
- "\n",
- "vol_surface = {}\n",
- "for trade in portf.swaptions:\n",
- " try:\n",
- " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=spread_date, interp_method = \"bivariate_linear\")\n",
- " except:\n",
- " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=spread_date- BDay(4), interp_method = \"bivariate_linear\")\n",
- " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='MS', option_type=trade.option_type)[-1]]\n",
- "\n",
- "portf.reset_pv()\n",
- "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=[\"pnl\"],\n",
- " spread_shock=spread_shock,\n",
- " vol_shock=[0, .3],\n",
- " corr_shock = [0],\n",
- " vol_surface=vol_surface)\n",
- "\n",
- "attrib = (scens.\n",
- " reset_index(level=['date'], drop=True).\n",
- " groupby(level=0, axis=1).sum())\n",
- "results = attrib.xs((spread_shock[1], 0., 0), level=['spread_shock', 'corr_shock', 'vol_shock']).T"
- ]
+ "source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
- "source": [
- "################################### Run set of scenario\n",
- "curr_spread = analytics._ontr['HY'].spread\n",
- "spread_shock = [-100, -25, 1, +25 , 100]\n",
- "spread_shock = [x / curr_spread for x in spread_shock]\n",
- "vol_shock = 0.0\n",
- "corr_shock = 0\n",
- "portf = build_portf(position_date, spread_date)\n",
- "\n",
- "vol_surface = {}\n",
- "for trade in portf.swaptions:\n",
- " try:\n",
- " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=spread_date, interp_method = \"bivariate_linear\")\n",
- " except:\n",
- " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=spread_date- BDay(4), interp_method = \"bivariate_linear\")\n",
- " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='MS', option_type=trade.option_type)[-1]]\n",
- "\n",
- "portf.reset_pv()\n",
- "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl'],\n",
- " spread_shock=spread_shock,\n",
- " vol_shock=[vol_shock],\n",
- " corr_shock=[corr_shock],\n",
- " vol_surface=vol_surface)\n",
- "\n",
- "pnl = scens.xs('pnl', axis=1, level=2)\n",
- "pnl = pnl.xs((vol_shock, corr_shock), level=['vol_shock', 'corr_shock'])\n",
- "\n",
- "scenarios = (pnl.\n",
- " reset_index(level=['date'], drop=True).\n",
- " groupby(level=0, axis=1).sum())\n",
- "\n",
- "options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']\n",
- "tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'IGINX', 'BSPK', 'XOMEZ', 'XOINX', 'EUMEZ']\n",
- "hedges = ['HEDGE_CLO', 'HEDGE_MAC', 'HEDGE_MBS']\n",
- "\n",
- "synthetic =pd.DataFrame()\n",
- "synthetic['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)\n",
- "synthetic['tranches'] = scenarios[set(scenarios.columns).intersection(tranches)].sum(axis=1)\n",
- "synthetic['curve_trades'] = scenarios['curve_trades']\n",
- "synthetic['total'] = synthetic.sum(axis = 1)\n",
- "nav = go.get_net_navs()\n",
- "#(synthetic/nav.endbooknav[-1])\n",
- "scenarios.sum(axis=1)\n",
- "scenarios.sum(axis=1).to_clipboard()"
- ]
+ "source": []
},
{
"cell_type": "code",