Diffstat (limited to 'python/notebooks')
-rw-r--r--  python/notebooks/Reto Report.ipynb  132
1 files changed, 101 insertions, 31 deletions
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 2e1eb626..c1170b16 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -249,8 +249,7 @@
"################################## Calculate Historical Bond Duration/Yield\n",
"analytics.init_ontr()\n",
"mysql_engine = dbengine('rmbs_model')\n",
- "end_date = pd.datetime.today() - MonthEnd(1)\n",
- "dates = pd.date_range(datetime.date(2013, 1, 30), end_date, freq=\"M\")\n",
+ "dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"M\")\n",
"calc_df = pd.DataFrame()\n",
"sql_string = (\"SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 \"\n",
" \"and date(timestamp) < %s and date(timestamp) > %s order by timestamp desc\")\n",
@@ -291,48 +290,119 @@
"metadata": {},
"outputs": [],
"source": [
+ "################################## FX Exposure, any net CAD/EUR exposures are FX exposure. \n",
+ "################### doesn't add up to 1 including the USD as we now sum up all the NAVs after adjusting the Futures\n",
+ "nav = go.get_net_navs()\n",
+ "portfolio = go.get_portfolio()\n",
+ "monthend_portfolio = portfolio.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
+ " lambda df: df.loc[df.index[-1]]\n",
+ " )\n",
+ "#Adjust the endbooknav of futures\n",
+ "tickers = ['CD_CME', 'EC_CME']\n",
+ "factors = [100000, 125000]\n",
+ "currency = ['CAD', 'EUR']\n",
+ "for a, b, c in zip(tickers, factors, currency):\n",
+ " new_endbooknav = monthend_portfolio['endqty'] * monthend_portfolio['endlocalmarketprice'] * b\n",
+ " monthend_portfolio['endbooknav'] = new_endbooknav.where(monthend_portfolio['invid'].str.contains(a), monthend_portfolio['endbooknav'])\n",
+ " monthend_portfolio.loc[monthend_portfolio['invid'].str.contains(a), 'invccy'] = c\n",
+ "\n",
+ "monthend_portfolio = monthend_portfolio.merge(nav, left_index=True, right_index=True, suffixes=('_inst', '_fund'))\n",
+ "monthend_portfolio.index.name = 'periodenddate'\n",
+ "monthend_portfolio['percent_nav'] = monthend_portfolio['endbooknav_inst']/monthend_portfolio['endbooknav_fund']\n",
+ "\n",
+ "collateral_filter =monthend_portfolio['invid'].str.contains('LOAN')\n",
+ "futures_filter = monthend_portfolio['invid'].str.contains('|'.join(tickers))\n",
+ "cash_filter = ((monthend_portfolio['invid'] == 'CAD') | (monthend_portfolio['invid'] == 'EUR'))\n",
+ "trades = monthend_portfolio[(~futures_filter) & (~collateral_filter) & (~cash_filter)]\n",
+ "names = ['collateral', 'futures', 'cash', 'trades']\n",
+ "categories = [monthend_portfolio[collateral_filter], \n",
+ " monthend_portfolio[futures_filter], \n",
+ " monthend_portfolio[cash_filter],\n",
+ " trades]\n",
+ "exposure = {}\n",
+ "for n, x in zip(names, categories):\n",
+ " exposure[n] = x.groupby(['periodenddate', 'invccy']).sum()\n",
+ "exposure = pd.concat(exposure)['percent_nav']\n",
+ "exposure.unstack(level=1).T.to_clipboard()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "################################## Historical Notioinals and HY Equiv\n",
+ "dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
+ "portfs = {}\n",
+ "for d in dates:\n",
+ " d = d.date()\n",
+ " portfs[d] = build_portf(d)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"##################################\n",
- "def build_portf(position_date, spread_date):\n",
+ "def build_portf(position_date, spread_date=None):\n",
+ " if spread_date is None:\n",
+ " spread_date=position_date\n",
" conn = dawn_engine.raw_connection()\n",
" mysql_engine = dbengine('rmbs_model')\n",
" mysqlcrt_engine = dbengine('crt')\n",
+ " \n",
+ " on_the_run_index = CreditIndex('HY', on_the_run('HY', position_date), '5yr', value_date=position_date)\n",
"\n",
+ " #tranche positions\n",
" portf = get_tranche_portfolio(position_date, conn, False, 'SERCGMAST')\n",
+ " #swaption positions\n",
" s_portf = get_swaption_portfolio(position_date, conn)\n",
- " for t, id in zip(s_portf.trades, s_portf.trade_ids):\n",
- " portf.add_trade(t, id)\n",
+ " if bool(s_portf):\n",
+ " for t, id in zip(s_portf.trades, s_portf.trade_ids):\n",
+ " portf.add_trade(t, id)\n",
"\n",
" #index positions\n",
" df = pd.read_sql_query(\"SELECT * from list_cds_positions_by_strat(%s)\",\n",
- " dawn_engine, params=(position_date,))\n",
- " df_no_curve = df[~df.folder.str.contains(\"CURVE\")]\n",
- " for t in df_no_curve.itertuples(index=False):\n",
- " portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),\n",
- " (t.folder, t.security_desc))\n",
+ " dawn_engine, params=(position_date,))\n",
+ " if not(df.empty):\n",
+ " df_no_curve = df[~df.folder.str.contains(\"CURVE\")]\n",
+ " for t in df_no_curve.itertuples(index=False):\n",
+ " portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),\n",
+ " (t.folder, t.security_desc))\n",
"\n",
- " #separately add in curve delta\n",
- " df_curve = df[df.folder.str.contains(\"CURVE\")]\n",
- " curve_portf = Portfolio([CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional)\n",
- " for t in df_curve.itertuples(index=False)])\n",
- " curve_portf.value_date = spread_date\n",
- " curve_portf.mark()\n",
- " \n",
- " hyontr = deepcopy(analytics._ontr)\n",
- " hyontr.notional = curve_portf.hy_equiv\n",
- " portf.add_trade(hyontr, ('curve_trades', ''))\n",
+ " #separately add in curve delta\n",
+ " df_curve = df[df.folder.str.contains(\"CURVE\")]\n",
+ " curve_portf = Portfolio([CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional)\n",
+ " for t in df_curve.itertuples(index=False)])\n",
+ " curve_portf.value_date = spread_date\n",
+ " curve_portf.mark()\n",
+ "\n",
+ " hyontr = deepcopy(on_the_run_index)\n",
+ " hyontr.notional = curve_portf.hy_equiv\n",
+ " portf.add_trade(hyontr, ('curve_trades', ''))\n",
"\n",
" #get bond risks:\n",
+ " sql_string = (\"SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 \"\n",
+ " \"and date(timestamp) < %s and date(timestamp) > %s order by timestamp desc\")\n",
" with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n",
- " rmbs_pos = subprime_risk(position_date, dawnconn, mysql_engine)\n",
+ " timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=[\"timestamp\"], \n",
+ " params=[position_date, position_date - pd.tseries.offsets.DateOffset(15, \"D\")])\n",
+ " rmbs_pos = subprime_risk(position_date, dawnconn, mysql_engine, timestamps.iloc[0][0].date())\n",
" clo_pos = clo_risk(position_date, dawnconn, etconn)\n",
" crt_pos = crt_risk(position_date, dawnconn, mysqlcrt_engine)\n",
- " notional = 0\n",
- " for pos in [rmbs_pos, clo_pos, crt_pos]:\n",
- " notional += pos['hy_equiv'].sum() if pos is not None else 0\n",
- " \n",
- " hyontr_1 = deepcopy(analytics._ontr)\n",
- " hyontr_1.notional = -notional\n",
- " portf.add_trade(hyontr_1, ('bonds', ''))\n",
+ " rmbs_notional = 0\n",
+ " for pos in [rmbs_pos, crt_pos]:\n",
+ " rmbs_notional += pos['hy_equiv'].sum() if pos is not None else 0\n",
+ " hyontr_rmbs = deepcopy(on_the_run_index)\n",
+ " hyontr_rmbs.notional = -rmbs_notional\n",
+ " portf.add_trade(hyontr_rmbs, ('rmbs_bonds', ''))\n",
+ " if isinstance(clo_pos, pd.DataFrame):\n",
+ " hyontr_clos = deepcopy(on_the_run_index)\n",
+ " hyontr_clos.notional = -clo_pos['hy_equiv'].sum()\n",
+ " portf.add_trade(hyontr_clos, ('clo_bonds', ''))\n",
" \n",
" portf.value_date = spread_date\n",
" portf.mark(interp_method=\"bivariate_linear\")\n",
@@ -349,8 +419,8 @@
"source": [
"################################### Calculate stress scenario \n",
"position_date = (datetime.date.today() - BDay(1)).date()\n",
- "spread_date = (datetime.date.today() - BDay(1)).date()\n",
- "analytics.init_ontr(spread_date)"
+ "spread_date = position_date\n",
+ "#analytics.init_ontr(spread_date)"
]
},
{
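A small note on the stress-scenario hunk above: subtracting a pandas BDay offset from a plain datetime.date yields a pandas Timestamp, which is why the notebook calls .date() on the result. A minimal sketch of that business-day arithmetic:

import datetime
from pandas.tseries.offsets import BDay

# previous business day as a datetime.date; date - BDay(1) returns a Timestamp
position_date = (datetime.date.today() - BDay(1)).date()
spread_date = position_date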
@@ -391,7 +461,7 @@
" except:\n",
" vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series + 1, \n",
" value_date=spread_date, interp_method = \"bivariate_linear\")\n",
- " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='MS', option_type=trade.option_type)[-1]]\n",
+ " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='GS', option_type=trade.option_type)[-1]]\n",
"\n",
"scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=[\"pnl\"],\n",
" spread_shock=widen,\n",