about summary refs log tree commit diff stats
path: root/python/notebooks
diff options
context:
space:
mode:
Diffstat (limited to 'python/notebooks')
-rw-r--r-- python/notebooks/Reto Report.ipynb | 166
1 file changed, 45 insertions, 121 deletions
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 1a2873a2..a03c2e0f 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -29,7 +29,7 @@
"outputs": [],
"source": [
"#Set dates\n",
- "position_date = (datetime.date.today() - BDay(3)).date()\n",
+ "position_date = (datetime.date.today() - BDay(1)).date()\n",
"spread_date = position_date\n",
"ana._local = False\n",
"ana.init_ontr(spread_date)"
@@ -41,99 +41,41 @@
"metadata": {},
"outputs": [],
"source": [
- "#Stress scenario for weekly report --> copy paste results to Excel\n",
- "fund = 'SERCGMAST'\n",
- "spread_shock = np.array([100., 200.])\n",
+ "################################### Run scenarios\n",
+ "spread_shock = np.array([-100., -25., 1., +25. , 100., 200])\n",
"spread_shock /= ana._ontr['HY'].spread\n",
- "portf, _ = build_portfolio(position_date, spread_date, fund)\n",
- "vol_surface = generate_vol_surface(portf, 10)\n",
- "\n",
- "portf.reset_pv()\n",
- "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=[\"pnl\"],\n",
- " spread_shock=spread_shock,\n",
- " vol_shock=[0, .3],\n",
- " corr_shock = [0],\n",
- " vol_surface=vol_surface)\n",
- "\n",
- "attrib = (scens.\n",
- " reset_index(level=['date'], drop=True).\n",
- " groupby(level=0, axis=1).sum())\n",
- "results = attrib.xs((spread_shock[1], 0., 0), level=['spread_shock', 'corr_shock', 'vol_shock']).T\n",
- "results.to_clipboard()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "position_date"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "################################### JTD\n",
- "_, portf = build_portfolio(position_date, spread_date)\n",
- "jtd_i = []\n",
- "for t in portf.indices:\n",
- " bkt = BasketIndex(t.index_type, t.series, [t.tenor])\n",
- " spreads = pd.DataFrame(bkt.spreads() * 10000, index=pd.Index(bkt.tickers, name='ticker'), columns=['spread'])\n",
- " jump = pd.merge(spreads, bkt.jump_to_default() * t.notional, left_index=True, right_index=True)\n",
- " jtd_i.append(jump.rename(columns={jump.columns[1]: 'jtd'}))\n",
- "jtd_t = []\n",
- "for t in portf.tranches:\n",
- " jump = pd.concat([t.singlename_spreads().reset_index(['seniority', 'doc_clause'], drop=True), t.jump_to_default().rename('jtd')], axis=1)\n",
- " jtd_t.append(jump.drop(['weight', 'recovery'], axis=1))\n",
- "\n",
- "ref_names = pd.read_sql_query(\"select ticker, referenceentity from refentity\", dbconn('serenitasdb'), index_col='ticker')\n",
- "jump = pd.concat([pd.concat(jtd_t), pd.concat(jtd_i)])\n",
- "jump = jump.merge(ref_names, left_index=True, right_index=True)\n",
- "jump = jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "################################### Run set of scenario\n",
- "spread_shock = np.array([-100., -25., 1., +25. , 100.])\n",
- "spread_shock /= analytics._ontr['HY'].spread\n",
"portf, _ = build_portfolio(position_date, spread_date)\n",
"vol_surface = generate_vol_surface(portf, 5)\n",
"portf.reset_pv()\n",
- "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl'],\n",
+ "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl', 'hy_equiv'],\n",
" spread_shock=spread_shock,\n",
" vol_shock=[0.0],\n",
" corr_shock=[0.0],\n",
" vol_surface=vol_surface)\n",
"\n",
- "pnl = scens.xs('pnl', axis=1, level=2)\n",
- "pnl = pnl.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
- "\n",
- "scenarios = (pnl.\n",
- " reset_index(level=['date'], drop=True).\n",
- " groupby(level=0, axis=1).sum())\n",
+ "strategies = {}\n",
+ "strategies['options'] = ['HYOPTDEL', 'HYPAYER', 'HYREC', \n",
+ " 'IGOPTDEL', 'IGPAYER', 'IGREC']\n",
+ "strategies['tranches'] = ['HYSNR', 'HYMEZ', 'HYINX', 'HYEQY', \n",
+ " 'IGSNR', 'IGMEZ', 'IGINX', 'IGEQY', \n",
+ " 'EUSNR', 'EUMEZ', 'EUINX', 'EUEQY', \n",
+ " 'XOSNR', 'XOMEZ', 'XOINX', 'XOEQY', \n",
+ " 'BSPK']\n",
"\n",
- "options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']\n",
- "tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'IGINX', 'BSPK', 'XOMEZ', 'XOINX', 'EUMEZ']\n",
- "hedges = ['HEDGE_CLO', 'HEDGE_MAC', 'HEDGE_MBS']\n",
+ "scens = scens.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
+ "scens.columns.names=['strategy', 'trade_id', 'scen_type']\n",
"\n",
- "synthetic =pd.DataFrame()\n",
- "synthetic['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)\n",
- "synthetic['tranches'] = scenarios[set(scenarios.columns).intersection(tranches)].sum(axis=1)\n",
- "synthetic['curve_trades'] = scenarios['curve_trades']\n",
- "synthetic['total'] = synthetic.sum(axis = 1)\n",
- "nav = go.get_net_navs()\n",
- "#(synthetic/nav.endbooknav[-1])\n",
- "scenarios.sum(axis=1)\n",
- "scenarios.sum(axis=1).to_clipboard()"
+ "results = {}\n",
+ "for i, g in scens.groupby(level='scen_type', axis =1):\n",
+ " temp = g.groupby(level='strategy', axis =1).sum()\n",
+ " for key, item in strategies.items():\n",
+ " exist_columns = set(temp.columns).intersection(item)\n",
+ " temp[key] = temp[exist_columns].sum(axis=1)\n",
+ " temp.drop(exist_columns, axis=1, inplace=True)\n",
+ " temp['total'] = temp.sum(axis = 1)\n",
+ " results[i] = temp\n",
+ "results = pd.concat(results)\n",
+ "results.to_clipboard()"
]
},
{
@@ -324,21 +266,14 @@
"outputs": [],
"source": [
"################################## Calculate Historical Bond Duration/Yield\n",
- "mysql_engine = dbengine('rmbs_model')\n",
- "dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"M\")\n",
- "calc_df = pd.DataFrame()\n",
- "sql_string = (\"SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 \"\n",
- " \"and date(timestamp) < %s and date(timestamp) > %s order by timestamp desc\")\n",
- "with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n",
- " for d in dates:\n",
- " timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=[\"timestamp\"], params=[d, d - pd.tseries.offsets.DateOffset(15, \"D\")])\n",
- " calc_df = calc_df.append(subprime_risk(d.date(), dawnconn, mysql_engine, timestamps.iloc[0,0].date()))\n",
- "calc_df=calc_df.reset_index().set_index('date')\n",
- "calc_df = calc_df.dropna(subset=['bond_yield', 'hy_equiv']) \n",
- "bond_stats = pd.DataFrame()\n",
- "for d, g in calc_df.groupby(pd.Grouper(freq='M')):\n",
- " bond_stats.loc[d, 'dur'] = sum(g.notional * g.factor * g.modDur)/sum(g.notional * g.factor)\n",
- " bond_stats.loc[d, 'yield'] = sum(g.usd_market_value * g.modDur * g.bond_yield) /sum(g.usd_market_value * g.modDur)"
+ "conn = dbconn(\"dawndb\")\n",
+ "fund = 'SERCGMAST'\n",
+ "dates = pd.date_range(datetime.date(2013, 1, 30), datetime.date.today() - MonthEnd(1), freq=\"M\")\n",
+ "bond_stats = {}\n",
+ "for d in dates:\n",
+ " g = subprime_risk(pd.to_timestamp(d), conn, dbengine(\"rmbs_model\"), fund=fund)\n",
+ " bond_stats[d, 'dur'] = sum(g.notional * g.factor * g.modDur)/sum(g.notional * g.factor)\n",
+ " bond_stats[d, 'yield'] = sum(g.usd_market_value * g.modDur * g.bond_yield) /sum(g.usd_market_value * g.modDur)"
]
},
{
@@ -347,30 +282,19 @@
"metadata": {},
"outputs": [],
"source": [
- "################################## Leverage Ratio\n",
+ "################################## Leverage Ratio - Positive and negative PV trades. The only thing missing in the calc are the USD/EUR Loans\n",
"nav = go.get_net_navs()\n",
- "portfolio = go.get_portfolio()\n",
- "monthend_portfolio = portfolio.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
+ "portf = go.get_portfolio()\n",
+ "df = portf.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
" lambda df: df.loc[df.index[-1]]\n",
" )\n",
- "options_list = ['IGPAYER', 'HYPAYER', 'IGREC', 'HYREC']\n",
- "syn_list = ['CSO_TRANCH', 'LQD_TRANCH', 'HEDGE_CLO', 'HEDGE_CSO', 'HEDGE_MAC', 'HEDGE_MBS',\n",
- " 'IGMEZ','HYMEZ', 'IGINX', 'MBSCDS', 'IGCURVE', 'IGOPTDEL', 'HYOPTDEL',\n",
- " 'ITRXCURVE', 'IGEQY', 'IGSNR', 'BSPK', 'HYINX', 'HYEQY', 'XCURVE', 'XOMEZ', 'XOINX', 'EUMEZ']\n",
- "\n",
- "monthend_syn = monthend_portfolio[monthend_portfolio.strat.isin(syn_list)]\n",
- "monthend_syn = monthend_syn[monthend_syn['endqty'] < 0]\n",
- "monthend_syn = monthend_syn.groupby(['periodenddate','invid']).sum()\n",
- "\n",
- "monthend_cash = monthend_portfolio[~monthend_portfolio.strat.isin(syn_list)]\n",
- "monthend_cash = monthend_cash.groupby(['periodenddate','invid']).sum()\n",
- "\n",
- "positive = pd.concat([monthend_syn, monthend_cash])\n",
- "positive = positive['endbooknav'].groupby(['periodenddate']).agg(lambda x: x[x>0].sum())\n",
- "\n",
- "nav = nav.merge(positive, left_index=True, right_index=True)\n",
- "nav['leverage'] = nav.endbooknav_y/nav.endbooknav_x\n",
- "nav['leverage'].plot()"
+ "df = df[~df['invid'].isin(['USDLOAN', 'EURLOAN'])]\n",
+ "pvs = df['endbooknav'].groupby(['periodenddate', df['endbooknav'] >0]).sum().unstack().rename(\n",
+ " columns={True:'endbooknav_pos', False:'endbooknav_neg'})\n",
+ "nav = nav.merge(pvs, left_index=True, right_index=True)\n",
+ "nav['long_leverage'] = nav['endbooknav_pos']/nav.endbooknav\n",
+ "nav['gross_leverage'] = (nav['endbooknav_pos']-nav['endbooknav_neg'])/nav.endbooknav\n",
+ "print (\"positive pv/nav: \" + str(nav['long_leverage'].mean()), \"gross pv/nav: \" + str(nav['gross_leverage'].mean()))"
]
},
{
@@ -477,9 +401,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3",
+ "display_name": "Python 3.8.1 64-bit",
"language": "python",
- "name": "python3"
+ "name": "python38164bitc40c8740e5d542d7959acb14be96f4f3"
},
"language_info": {
"codemirror_mode": {