Diffstat (limited to 'python/notebooks')
-rw-r--r--  python/notebooks/Reto Report.ipynb | 75 ++++++++++++++++++++++++++++++++++++++++-----------------------------------
1 file changed, 40 insertions(+), 35 deletions(-)
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 3b97de27..4ced9d4d 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -13,16 +13,18 @@
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import exploration.VaR as var\n",
+ "import analytics\n",
"\n",
"from analytics.curve_trades import curve_pos, on_the_run\n",
"from analytics.index_data import get_index_quotes\n",
- "from analytics.scenarios import run_portfolio_scenarios\n",
+ "from analytics.scenarios import run_portfolio_scenarios, run_swaption_scenarios\n",
"from analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface, DualCorrTranche\n",
"from db import dbconn, dbengine\n",
"\n",
"conn = dbconn('dawndb')\n",
"dawndb = dbengine('dawndb')\n",
- "serenitasdb = dbengine('serenitasdb')"
+ "serenitasdb = dbengine('serenitasdb')\n",
+ "analytics.init_ontr()"
]
},
{
@@ -123,9 +125,10 @@
"metadata": {},
"outputs": [],
"source": [
- "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n",
+ "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()\n",
+ "spread_date = position_date\n",
"shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()\n",
- "(position_date, shock_date)"
+ "(position_date, spread_date, shock_date)"
]
},
{
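Note: with this hunk, position_date, spread_date, and shock_date all resolve to the same day, two business days back. A minimal sketch of the date arithmetic below, assuming only pandas and the standard library; subtracting a BDay offset from a datetime.date yields a pandas Timestamp, which is why the notebook chains .date().

import datetime
import pandas as pd

# Two business days before today; BDay skips weekends but not exchange holidays.
two_bd_ago = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()

# After this change the three report dates coincide.
position_date = spread_date = shock_date = two_bd_ago
print(position_date, spread_date, shock_date)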
@@ -135,69 +138,71 @@
"outputs": [],
"source": [
"#Current tranche and swaptions positions\n",
- "t_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
+ "t_sql_string = (\"SELECT id, folder, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
" \"OVER (partition by security_id, attach) AS ntl_agg \"\n",
" \"FROM cds WHERE swap_type='CD_INDEX_TRANCHE' AND termination_cp IS NULL \"\n",
" \"AND trade_date <= %s\")\n",
- "swaption_sql_string = (\"select id, security_desc from swaptions where date(expiration_date) \"\n",
+ "swaption_sql_string = (\"select id, folder, expiration_date from swaptions where date(expiration_date) \"\n",
" \"> %s and swap_type = 'CD_INDEX_OPTION' \"\n",
" \"AND trade_date <= %s AND termination_date iS NULL\")\n",
- "index_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
+ "index_sql_string = (\"SELECT id, folder, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
" \"OVER (partition by security_id, attach) AS ntl_agg \"\n",
" \"FROM cds WHERE swap_type='CD_INDEX' AND termination_cp IS null \"\n",
- " \"AND folder = 'IGOPTDEL' OR folder = 'HYOPTDEL' \"\n",
- " \"AND trade_date <= %s\")\n",
+ " \"AND trade_date <= %s AND maturity > %s\")\n",
"with conn.cursor() as c:\n",
- " #Get Tranche Trade Ids\n",
" c.execute(t_sql_string, (position_date,))\n",
- " t_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
- " #Get Swaption Trade Ids\n",
+ " t_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n",
" c.execute(swaption_sql_string, (position_date, position_date))\n",
- " swaption_trades = c.fetchall()\n",
- " #Get Index/deltas Trade Ids\n",
- " c.execute(index_sql_string, (position_date,))\n",
- " index_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
- " \n",
- "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n",
- " ['trn_'+ str(a) for a in t_trade_ids])\n",
- "for row in swaption_trades:\n",
- " option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n",
- " option_delta.mark()\n",
- " portf.add_trade(BlackSwaption.from_tradeid(row[0], option_delta), 'opt_' + str(row[0]))\n",
- "for index_id in index_trade_ids:\n",
- " portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
+ " swaption_trades = [[dealid, f\"{folder}_{dealid}\", expiration_date] for dealid, folder, expiration_date in c]\n",
+ " c.execute(index_sql_string, (position_date, position_date))\n",
+ " index_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n",
+ " \n",
+ "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid, _ in t_trades],\n",
+ " [trade_id for _, trade_id in t_trades])\n",
+ "for trade_id, name, expiration_date in swaption_trades:\n",
+ " if expiration_date > shock_date:\n",
+ " portf.add_trade(BlackSwaption.from_tradeid(trade_id), name)\n",
+ "for trade_id, name in index_trades:\n",
+ " portf.add_trade(CreditIndex.from_tradeid(trade_id), name)\n",
" \n",
"#get bond risks:\n",
"rmbs_pos = go.rmbs_pos(position_date)\n",
+ "clo_pos = go.clo_pos(position_date)\n",
"r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
- " shock_date)\n",
+ " spread_date)\n",
"duration, = next(r)\n",
"rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
- "notional = rmbs_pos['hy_equiv'].sum()\n",
- "notional = 47633776\n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = -notional), 'rmbs_bond')\n",
+ "notional = rmbs_pos['hy_equiv'].sum() + clo_pos['hy_equiv'].sum()\n",
+ "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', \n",
+ " value_date = spread_date, \n",
+ " notional = -notional), 'bonds')\n",
" \n",
- "portf.value_date = shock_date\n",
+ "portf.value_date = spread_date\n",
"portf.mark(interp_method=\"bivariate_linear\")\n",
"portf.reset_pv()\n",
"\n",
"vol_surface = {}\n",
"for trade in portf.swaptions:\n",
" vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=shock_date, interp_method = \"bivariate_linear\")\n",
- " vol_surface[trade.index.index_type + trade.index.series] = vs[vs.list(option_type='payer')[-1]]\n",
+ " value_date=spread_date, interp_method = \"bivariate_linear\")\n",
+ " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n",
"vol_shock = [0]\n",
- "corr_shock = [0]\n",
+ "corr_shock = [0, -.1]\n",
"spread_shock = tighten + [0] + widen\n",
"date_range = [pd.Timestamp(shock_date)]\n",
"\n",
- "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\", \"hy_equiv\"],\n",
+ "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
" spread_shock=spread_shock,\n",
" vol_shock=vol_shock,\n",
" corr_shock=corr_shock,\n",
" vol_surface=vol_surface)\n",
"\n",
- "scens.xs('pnl', level=1, axis=1).sum(axis=1)"
+ "attrib = scens.stack(level=0).reset_index()\n",
+ "attrib['strategy'] = attrib['level_4'].str.split('_\\d+', expand=True).iloc[:,0]\n",
+ "attrib = attrib.set_index(['spread_shock', 'strategy', 'corr_shock'])\n",
+ "attrib = attrib.groupby(['spread_shock', 'strategy', 'corr_shock']).sum()\n",
+ "\n",
+ "results = attrib.xs((widen[0], -0.1), level = ['spread_shock','corr_shock'])\n"
]
},
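Note on the attribution block added above: the scenario frame is stacked so each trade becomes a row, the trade label surfaces via reset_index() as an unnamed level (hence the literal column name 'level_4'), and splitting that label on the regex '_\d+' strips the deal id, leaving the folder prefix as the strategy key. A toy sketch of just this pandas step, with hypothetical deal ids and PnL values:

import pandas as pd

# Hypothetical stacked output: labels are "<folder>_<deal id>", plus the 'bonds' leg.
toy = pd.DataFrame({
    "level_4": ["IGOPTDEL_101", "HYOPTDEL_202", "IGOPTDEL_103", "bonds"],
    "pnl": [1.5, -0.7, 0.3, 2.0],
})

# A multi-character pattern is treated as a regex, so '_\d+' drops the trailing
# deal id; labels without a match (e.g. 'bonds') pass through unchanged.
toy["strategy"] = toy["level_4"].str.split(r"_\d+", expand=True).iloc[:, 0]

print(toy.groupby("strategy")["pnl"].sum())
# HYOPTDEL   -0.7
# IGOPTDEL    1.8
# bonds       2.0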
{