Diffstat (limited to 'python/notebooks/Reto Report.ipynb')
-rw-r--r--  python/notebooks/Reto Report.ipynb | 46
1 file changed, 31 insertions(+), 15 deletions(-)
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 6e62b252..3b97de27 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -123,6 +123,17 @@
"metadata": {},
"outputs": [],
"source": [
+ "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n",
+ "shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()\n",
+ "(position_date, shock_date)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"#Current tranche and swaptions positions\n",
"t_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
" \"OVER (partition by security_id, attach) AS ntl_agg \"\n",
@@ -138,50 +149,55 @@
" \"AND trade_date <= %s\")\n",
"with conn.cursor() as c:\n",
" #Get Tranche Trade Ids\n",
- " c.execute(t_sql_string, (date,))\n",
+ " c.execute(t_sql_string, (position_date,))\n",
" t_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
" #Get Swaption Trade Ids\n",
- " c.execute(swaption_sql_string, (date, date))\n",
+ " c.execute(swaption_sql_string, (position_date, position_date))\n",
" swaption_trades = c.fetchall()\n",
" #Get Index/deltas Trade Ids\n",
- " c.execute(index_sql_string, (date,))\n",
+ " c.execute(index_sql_string, (position_date,))\n",
" index_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
" \n",
"portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n",
- " t_trade_ids)\n",
+ " ['trn_'+ str(a) for a in t_trade_ids])\n",
"for row in swaption_trades:\n",
- " option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', date)\n",
+ " option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n",
" option_delta.mark()\n",
" portf.add_trade(BlackSwaption.from_tradeid(row[0], option_delta), 'opt_' + str(row[0]))\n",
"for index_id in index_trade_ids:\n",
" portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
" \n",
- "#Update manually - positive notional = long risk\n",
- "non_trancheSwap_risk_notional = 49119912 \n",
- "\n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = date, notional = -non_trancheSwap_risk_notional), 'bond')\n",
+ "#get bond risks:\n",
+ "rmbs_pos = go.rmbs_pos(position_date)\n",
+ "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
+ " shock_date)\n",
+ "duration, = next(r)\n",
+ "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
+ "notional = rmbs_pos['hy_equiv'].sum()\n",
+ "notional = 47633776\n",
+ "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = -notional), 'rmbs_bond')\n",
" \n",
- "portf.value_date = date\n",
+ "portf.value_date = shock_date\n",
"portf.mark(interp_method=\"bivariate_linear\")\n",
"portf.reset_pv()\n",
"\n",
"vol_surface = {}\n",
"for trade in portf.swaptions:\n",
" vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=date, interp_method = \"bivariate_linear\")\n",
+ " value_date=shock_date, interp_method = \"bivariate_linear\")\n",
" vol_surface[trade.index.index_type + trade.index.series] = vs[vs.list(option_type='payer')[-1]]\n",
"vol_shock = [0]\n",
"corr_shock = [0]\n",
- "spread_shock = widen + tighten\n",
- "date_range = [pd.Timestamp(date)]\n",
+ "spread_shock = tighten + [0] + widen\n",
+ "date_range = [pd.Timestamp(shock_date)]\n",
"\n",
- "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
+ "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\", \"hy_equiv\"],\n",
" spread_shock=spread_shock,\n",
" vol_shock=vol_shock,\n",
" corr_shock=corr_shock,\n",
" vol_surface=vol_surface)\n",
"\n",
- "scens.sum(axis=1)"
+ "scens.xs('pnl', level=1, axis=1).sum(axis=1)"
]
},
{