Diffstat (limited to 'python/notebooks/Reto Report.ipynb')
-rw-r--r--  python/notebooks/Reto Report.ipynb  222
1 file changed, 160 insertions, 62 deletions
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 6769a123..2e1eb626 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -16,6 +16,7 @@
"from analytics.index_data import get_index_quotes\n",
"from analytics.scenarios import run_portfolio_scenarios\n",
"from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio,DualCorrTranche\n",
+ "from copy import deepcopy\n",
"\n",
"from utils.db import dbconn, dbengine\n",
"\n",
@@ -99,6 +100,7 @@
"temp = pd.concat([paydowns, df.principal_payment, df.accrued_payment], axis=1).fillna(0)\n",
"turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()\n",
"turnover[12:].plot()\n",
+ "turnover.min(), turnover.max(), turnover.mean()\n",
"turnover[-1]"
]
},
@@ -108,11 +110,11 @@
"metadata": {},
"outputs": [],
"source": [
- "################################### Average Portfolio Sales Turnover - as of last monthend from today\n",
+ "################################### BRINKER: Average Portfolio Sales Turnover - as of last monthend from today\n",
"#(total Bond Sales Proceeds + paydown)/average starting 12 months NAV\n",
"#Actually: Rolling 12 months sum of (total bond sales proceeds + paydown)/monthly NAV\n",
"nav = go.get_net_navs()\n",
- "fund='SERCGMAST'\n",
+ "fund='BRINKER'\n",
"sql_string = \"SELECT * FROM bonds WHERE buysell IS False and fund = %s\"\n",
"df = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
@@ -138,15 +140,75 @@
"metadata": {},
"outputs": [],
"source": [
- "################################### Average Monthly Traded Volume\n",
+ "################################### Number of position (min/max/average) /position size (min/max/average) /Top 10 position size\n",
+ "portfolio = go.get_portfolio()\n",
"nav = go.get_net_navs()\n",
- "sql_string = \"SELECT * FROM bonds\"\n",
- "df = pd.read_sql_query(sql_string, dawn_engine,\n",
+ "exc_port_list = [None, 'SERCGLLC__SERCGLLC', 'CASH', 'SERCGLTD__SERCGLTD', 'GFS_HELPER_BUSINESS_UNIT', 'SER_TEST__SER_TEST']\n",
+ "exc_inst_list = ['CAD', 'CADF', 'SEREONUS', 'USD', 'USDF', 'USDLOAN', 'EUR', 'EURLOAN', 'USDCASHINT',\n",
+ " 'USDLOANOLD', 'USDSWAPFEE', 'EURF','CADCASHINT','COMMISSIONFEES', 'EURCASHINT', 'COMMUNICATIONFEES']\n",
+ "exc_inst_list2 = ['86359DUR6OLD2','004375DV0OLD4','32027GAD8OLD7','75406DAC7OLD7','86359DMN4OLD7','45661EAW4OLD7']\n",
+ "\n",
+ "portfolio = portfolio[~portfolio.port.isin(exc_port_list) &\n",
+ " ~portfolio.identifier.isin(exc_inst_list) &\n",
+ " ~portfolio.identifier.isin(exc_inst_list2)]\n",
+ "\n",
+ "all_positions = portfolio.groupby(['periodenddate', 'identifier'])['endbooknav'].sum() \n",
+ "num_pos = all_positions.groupby('periodenddate').count()\n",
+ "#min/max/mean number of positions\n",
+ "num_pos.min(), num_pos.max(), num_pos.mean()\n",
+ "\n",
+ "bonds = portfolio[(portfolio.custacctname == 'V0NSCLMAMB') &\n",
+ " (portfolio.identifier != 'USD') &\n",
+ " (portfolio.endqty != 0) &\n",
+ " (portfolio.port.isin(['MORTGAGES', 'STRUCTURED', 'CLO'])) &\n",
+ " (~portfolio.strat.isin(['MBSCDS']))]\n",
+ "\n",
+ "monthend_bonds = bonds.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
+ " lambda df: df.loc[df.index[-1]]\n",
+ " )\n",
+ "monthend_bonds = monthend_bonds.groupby(['periodenddate', 'identifier']).sum()\n",
+ "nav.index.rename('periodenddate', inplace=True)\n",
+ "monthend_bonds = monthend_bonds.merge(nav, left_index=True, right_index=True, suffixes=('_bond', '_fund'))\n",
+ "monthend_bonds['percentage'] = monthend_bonds.endbooknav_bond/monthend_bonds.endbooknav_fund\n",
+ "last_date = monthend_bonds.index.get_level_values(0).max() \n",
+ "latest = monthend_bonds.loc[last_date]\n",
+ "#min/max/mean position size\n",
+ "latest['percentage'][latest['percentage']>0.0000001].min(), latest['percentage'].max(), latest['percentage'].mean()\n",
+ "#10th largest positions\n",
+ "ten_largest = monthend_bonds.groupby('periodenddate').apply(lambda df: df['percentage'].nlargest(10).sum())\n",
+ "ten_largest.min(), ten_largest.max(), ten_largest.mean()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "################################### Average Traded Volume\n",
+ "nav = go.get_net_navs()\n",
+ "sql_string = \"SELECT * FROM bonds where fund='SERCGMAST'\"\n",
+ "bond_trades = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
" index_col = 'trade_date')\n",
- "df = df.groupby(pd.Grouper(freq='M')).sum()\n",
- "volume = df.principal_payment/nav.endbooknav\n",
- "volume.mean()"
+ "g = bond_trades['principal_payment'].groupby(pd.Grouper(freq='M'))\n",
+ "#min/max/mean bond trades by count (weekly = /4)\n",
+ "g.count().min()/4, g.count().max()/4, g.count().mean()/4\n",
+ "#min/max/mean bond trades by MV (weekly = /4)\n",
+ "volume = g.sum()/nav.endbooknav\n",
+ "volume.min()/4, volume.max()/4, volume.mean()/4\n",
+ "\n",
+ "sql_string = \"SELECT * FROM cds where fund='SERCGMAST'\"\n",
+ "cds_trades = pd.read_sql_query(sql_string, dawn_engine,\n",
+ " parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
+ " index_col = 'trade_date')\n",
+ "g = cds_trades['notional'].groupby(pd.Grouper(freq='M'))\n",
+ "#min/max/mean cds trades by count\n",
+ "g.count().min()/4, g.count().max()/4, g.count().mean()/4\n",
+ "#min/max/mean cds trades by notional\n",
+ "volume = g.sum()/nav.endbooknav\n",
+ "volume.fillna(0, inplace=True)\n",
+ "volume.min(), volume.max()/4, volume.mean()/4"
]
},
{
@@ -159,7 +221,7 @@
"#Time series of bond portfolio age (portfolio date - latest buy date of position) - weighted by MV of all bonds.\n",
"#Problem is if we buy the same position again it resets to the holding period to 0\n",
"nav = go.get_net_navs()\n",
- "sql_string = \"SELECT * FROM bonds order by trade_date desc\"\n",
+ "sql_string = \"SELECT * FROM bonds where fund = 'SERCGMAST' order by trade_date desc\"\n",
"df = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
" index_col = 'trade_date')\n",
@@ -173,7 +235,9 @@
"buy_dates['hold_days'] = (buy_dates.index - buy_dates.buy_date)/np.timedelta64(1, 'D')\n",
"def weighted_average(df):\n",
" return np.average(df.hold_days,weights=df.endbooknav)\n",
- "hold_period = buy_dates.groupby('periodenddate').apply(func = weighted_average)"
+ "hold_period = buy_dates.groupby('periodenddate').apply(func = weighted_average)\n",
+ "hold_period_last_five = hold_period.loc[datetime.date.today()- datetime.timedelta(weeks=52*5)::]\n",
+ "hold_period_last_five.min(), hold_period_last_five.max(), hold_period_last_five.mean()"
]
},
{
@@ -208,11 +272,84 @@
"metadata": {},
"outputs": [],
"source": [
+ "################################## Leverage Ratio\n",
+ "nav = go.get_net_navs()\n",
+ "portfolio = go.get_portfolio()\n",
+ "monthend_portfolio = portfolio.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
+ " lambda df: df.loc[df.index[-1]]\n",
+ " )\n",
+ "monthend_byinvid = monthend_portfolio.groupby(['periodenddate','invid']).sum()\n",
+ "positive = monthend_byinvid['endbooknav'].groupby(['periodenddate']).agg(lambda x: x[x>0].sum())\n",
+ "nav = nav.merge(positive, left_index=True, right_index=True)\n",
+ "nav['leverage'] = nav.endbooknav_y/nav.endbooknav_x\n",
+ "nav['leverage'].plot()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "##################################\n",
+ "def build_portf(position_date, spread_date):\n",
+ " conn = dawn_engine.raw_connection()\n",
+ " mysql_engine = dbengine('rmbs_model')\n",
+ " mysqlcrt_engine = dbengine('crt')\n",
+ "\n",
+ " portf = get_tranche_portfolio(position_date, conn, False, 'SERCGMAST')\n",
+ " s_portf = get_swaption_portfolio(position_date, conn)\n",
+ " for t, id in zip(s_portf.trades, s_portf.trade_ids):\n",
+ " portf.add_trade(t, id)\n",
+ "\n",
+ " #index positions\n",
+ " df = pd.read_sql_query(\"SELECT * from list_cds_positions_by_strat(%s)\",\n",
+ " dawn_engine, params=(position_date,))\n",
+ " df_no_curve = df[~df.folder.str.contains(\"CURVE\")]\n",
+ " for t in df_no_curve.itertuples(index=False):\n",
+ " portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),\n",
+ " (t.folder, t.security_desc))\n",
+ "\n",
+ " #separately add in curve delta\n",
+ " df_curve = df[df.folder.str.contains(\"CURVE\")]\n",
+ " curve_portf = Portfolio([CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional)\n",
+ " for t in df_curve.itertuples(index=False)])\n",
+ " curve_portf.value_date = spread_date\n",
+ " curve_portf.mark()\n",
+ " \n",
+ " hyontr = deepcopy(analytics._ontr)\n",
+ " hyontr.notional = curve_portf.hy_equiv\n",
+ " portf.add_trade(hyontr, ('curve_trades', ''))\n",
+ "\n",
+ " #get bond risks:\n",
+ " with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n",
+ " rmbs_pos = subprime_risk(position_date, dawnconn, mysql_engine)\n",
+ " clo_pos = clo_risk(position_date, dawnconn, etconn)\n",
+ " crt_pos = crt_risk(position_date, dawnconn, mysqlcrt_engine)\n",
+ " notional = 0\n",
+ " for pos in [rmbs_pos, clo_pos, crt_pos]:\n",
+ " notional += pos['hy_equiv'].sum() if pos is not None else 0\n",
+ " \n",
+ " hyontr_1 = deepcopy(analytics._ontr)\n",
+ " hyontr_1.notional = -notional\n",
+ " portf.add_trade(hyontr_1, ('bonds', ''))\n",
+ " \n",
+ " portf.value_date = spread_date\n",
+ " portf.mark(interp_method=\"bivariate_linear\")\n",
+ " portf.reset_pv()\n",
+ " \n",
+ " return portf"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"################################### Calculate stress scenario \n",
"position_date = (datetime.date.today() - BDay(1)).date()\n",
- "shock_date = (datetime.date.today() - BDay(1)).date()\n",
- "spread_date = shock_date\n",
- "(position_date, spread_date, shock_date)\n",
+ "spread_date = (datetime.date.today() - BDay(1)).date()\n",
"analytics.init_ontr(spread_date)"
]
},
@@ -243,59 +380,20 @@
"metadata": {},
"outputs": [],
"source": [
- "#tranche positions\n",
- "conn = dawn_engine.raw_connection()\n",
- "mysql_engine = dbengine('rmbs_model')\n",
- "mysqlcrt_engine = dbengine('crt')\n",
- "\n",
- "portf = get_tranche_portfolio(position_date, conn, False, 'SERCGMAST')\n",
- "s_portf = get_swaption_portfolio(position_date, conn)\n",
- "for t, id in zip(s_portf.trades, s_portf.trade_ids):\n",
- " portf.add_trade(t, id)\n",
- "\n",
- "#index positions\n",
- "df = pd.read_sql_query(\"SELECT * from list_cds_positions_by_strat(%s)\",\n",
- " dawn_engine, params=(position_date,))\n",
- "df_no_curve = df[~df.folder.str.contains(\"CURVE\")]\n",
- "for t in df_no_curve.itertuples(index=False):\n",
- " portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),\n",
- " (t.folder, t.security_desc))\n",
- " \n",
- "#separately add in curve delta\n",
- "df_curve = df[df.folder.str.contains(\"CURVE\")]\n",
- "curve_portf = Portfolio([CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional)\n",
- " for t in df_curve.itertuples(index=False)])\n",
- "curve_portf.value_date = spread_date\n",
- "curve_portf.mark()\n",
- "\n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY', spread_date), '5yr', \n",
- " value_date=spread_date, \n",
- " notional=curve_portf.hy_equiv), ('curve_trades', ''))\n",
- "\n",
- "#get bond risks:\n",
- "with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n",
- " rmbs_pos = subprime_risk(position_date, dawnconn, mysql_engine)\n",
- " clo_pos = clo_risk(position_date, dawnconn, etconn)\n",
- " crt_pos = crt_risk(position_date, dawnconn, mysqlcrt_engine)\n",
- "if clo_pos is None:\n",
- " notional = rmbs_pos['hy_equiv'].sum() + crt_pos['hy_equiv'].sum()\n",
- "else:\n",
- " notional = rmbs_pos['hy_equiv'].sum() + clo_pos['hy_equiv'].sum() + crt_pos['hy_equiv'].sum()\n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY', spread_date), '5yr', \n",
- " value_date = spread_date, \n",
- " notional = -notional), ('bonds', ''))\n",
- " \n",
- "portf.value_date = spread_date\n",
- "portf.mark(interp_method=\"bivariate_linear\")\n",
- "portf.reset_pv()\n",
+ "#tranche/swaption positions\n",
+ "portf = build_portf(position_date, spread_date)\n",
"\n",
"vol_surface = {}\n",
"for trade in portf.swaptions:\n",
- " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=spread_date, interp_method = \"bivariate_linear\")\n",
+ " try:\n",
+ " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
+ " value_date=spread_date, interp_method = \"bivariate_linear\")\n",
+ " except:\n",
+ " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series + 1, \n",
+ " value_date=spread_date, interp_method = \"bivariate_linear\")\n",
" vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='MS', option_type=trade.option_type)[-1]]\n",
"\n",
- "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(shock_date)], params=[\"pnl\"],\n",
+ "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=[\"pnl\"],\n",
" spread_shock=widen,\n",
" vol_shock=[0],\n",
" corr_shock = [0],\n",
@@ -338,7 +436,7 @@
" groupby(level=0, axis=1).sum())\n",
"\n",
"options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']\n",
- "tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'IGINX', 'BSPK']\n",
+ "tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'IGINX', 'BSPK', 'XOMEZ', 'XOINX']\n",
"\n",
"scenarios['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)\n",
"scenarios['tranches'] = scenarios[set(scenarios.columns).intersection(tranches)].sum(axis=1)\n",