{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import datetime\n", "from pandas.tseries.offsets import BDay, MonthEnd\n", "import globeop_reports as go\n", "import pandas as pd\n", "import analytics\n", "import numpy as np\n", "\n", "from analytics.index_data import get_index_quotes\n", "from analytics.scenarios import run_portfolio_scenarios\n", "from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio,DualCorrTranche\n", "from copy import deepcopy\n", "\n", "from utils.db import dbconn, dbengine\n", "\n", "from risk.tranches import get_tranche_portfolio\n", "from risk.swaptions import get_swaption_portfolio\n", "from risk.bonds import subprime_risk, clo_risk, crt_risk\n", "\n", "dawn_engine = dbengine('dawndb')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#PNL Allocation\n", "date = datetime.date.today() - BDay(1)\n", "report_date = date - MonthEnd(1)\n", "report_date" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################### Find the strategies that are not defined: undefined needs to be mapped in strat_map\n", "strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n", "nav = go.get_net_navs()\n", "m_pnl = go.get_monthly_pnl(['strat', 'custacctname'])\n", "m_pnl = m_pnl.reset_index().merge(strats, on=['strat', 'custacctname'], how='left')\n", "undefined = m_pnl[m_pnl.pnl.isna()].groupby(['strat', 'custacctname']).last()\n", "#Get PNL Allocation\n", "#Input latest NAVS to: '/home/serenitas/edwin/Python/subscription_fee_data.csv'\n", "pnl_alloc = m_pnl.groupby(['date', 'pnl']).sum()\n", "pnl_alloc = pnl_alloc.join(nav.begbooknav)\n", "pnl_alloc['strat_return'] = pnl_alloc.mtdtotalbookpl / pnl_alloc.begbooknav\n", "#rolling 12 months PNL per strategy - copy to RiskMonitor\n", "start_date = report_date - pd.tseries.offsets.MonthEnd(11)\n", 
"rolling_return = pnl_alloc[start_date:report_date].groupby('pnl').sum()['strat_return']\n", "rolling_return.to_clipboard()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################### Average Portfolio Sales Turnover - as of last monthend from today\n", "#(total Bond Sales Proceeds + paydown)/average starting 12 months NAV\n", "#Actually: Rolling 12 months sum of (total bond sales proceeds + paydown)/monthly NAV\n", "nav = go.get_net_navs()\n", "fund='SERCGMAST'\n", "sql_string = \"SELECT * FROM bonds WHERE buysell IS False and fund = %s\"\n", "df = pd.read_sql_query(sql_string, dawn_engine,\n", " parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n", " params=[fund,],\n", " index_col = 'trade_date')\n", "df = df.groupby(pd.Grouper(freq='M')).sum()\n", "#Average traded volume (Bonds only)\n", "\n", "#Now get portfolio paydown per month\n", "portfolio = go.get_portfolio()\n", "portfolio = portfolio[(portfolio.custacctname == 'V0NSCLMAMB') &\n", " (portfolio.identifier != 'USD') &\n", " (portfolio.endqty != 0)]\n", "cf = pd.read_sql_query(\"SELECT * FROM cashflow_history\", dawn_engine,\n", " parse_dates=['date'],\n", " index_col=['date']).sort_index()\n", "portfolio = portfolio.set_index('identifier', append=True)\n", "portfolio = portfolio['endqty'].groupby(['identifier', 'periodenddate']).sum()\n", "portfolio = portfolio.reset_index('identifier')\n", "df_1 = pd.merge_asof(cf, portfolio.sort_index(), left_index=True, right_index=True, by='identifier')\n", "df_1 = df_1.dropna(subset=['endqty'])\n", "df_1 = df_1[(df_1.principal_bal != 0) & (df_1.principal != 0)]\n", "df_1['paydown'] = df_1.apply(lambda df: df.endqty/df.principal_bal * df.principal, axis=1)\n", "paydowns = df_1.paydown.groupby(pd.Grouper(freq='M')).sum()\n", "temp = pd.concat([paydowns, df.principal_payment, df.accrued_payment], axis=1).fillna(0)\n", "turnover = 
(temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()\n", "turnover[12:].plot()\n", "turnover.min(), turnover.max(), turnover.mean()\n", "turnover[-1]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################### BRINKER: Average Portfolio Sales Turnover - as of last monthend from today\n", "#(total Bond Sales Proceeds + paydown)/average starting 12 months NAV\n", "#Actually: Rolling 12 months sum of (total bond sales proceeds + paydown)/monthly NAV\n", "nav = go.get_net_navs()\n", "fund='BRINKER'\n", "sql_string = \"SELECT * FROM bonds WHERE buysell IS False and fund = %s\"\n", "df = pd.read_sql_query(sql_string, dawn_engine,\n", " parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n", " params=[fund,],\n", " index_col = 'trade_date')\n", "df = df.groupby(pd.Grouper(freq='M')).sum()\n", "\n", "cf = pd.read_sql_query(\"SELECT * FROM cashflow_history\", dawn_engine,\n", " parse_dates=['date'],\n", " index_col=['date']).sort_index()\n", "sql_string = \"SELECT description, identifier, notional, price, factor FROM risk_positions(%s, %s, 'BRINKER')\"\n", "pos = {}\n", "for d in cf.index.unique():\n", " for ac in ['Subprime', 'CRT']:\n", " pos[d, ac] = pd.read_sql_query(sql_string, dawn_engine, params=[d.date(), ac])\n", "pos = pd.concat(pos, names=['date', 'asset_class'])\n", "pos = pos.reset_index(level=[1,2])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################### Number of position (min/max/average) /position size (min/max/average) /Top 10 position size\n", "portfolio = go.get_portfolio()\n", "nav = go.get_net_navs()\n", "exc_port_list = [None, 'SERCGLLC__SERCGLLC', 'CASH', 'SERCGLTD__SERCGLTD', 'GFS_HELPER_BUSINESS_UNIT', 'SER_TEST__SER_TEST']\n", "exc_inst_list = ['CAD', 'CADF', 'SEREONUS', 'USD', 'USDF', 'USDLOAN', 'EUR', 'EURLOAN', 'USDCASHINT',\n", " 'USDLOANOLD', 'USDSWAPFEE', 
# --- Position counts and sizes -----------------------------------------------
# Number of positions (min/max/average), position size (min/max/average) and
# the sum of the ten largest positions as a fraction of fund NAV.
portfolio = go.get_portfolio()
nav = go.get_net_navs()

# Books/instruments that are cash, fees or test accounts rather than positions.
exc_port_list = [None, 'SERCGLLC__SERCGLLC', 'CASH', 'SERCGLTD__SERCGLTD',
                 'GFS_HELPER_BUSINESS_UNIT', 'SER_TEST__SER_TEST']
exc_inst_list = ['CAD', 'CADF', 'SEREONUS', 'USD', 'USDF', 'USDLOAN', 'EUR', 'EURLOAN',
                 'USDCASHINT', 'USDLOANOLD', 'USDSWAPFEE', 'EURF', 'CADCASHINT',
                 'COMMISSIONFEES', 'EURCASHINT', 'COMMUNICATIONFEES']
exc_inst_list2 = ['86359DUR6OLD2', '004375DV0OLD4', '32027GAD8OLD7',
                  '75406DAC7OLD7', '86359DMN4OLD7', '45661EAW4OLD7']

keep = (~portfolio.port.isin(exc_port_list)
        & ~portfolio.identifier.isin(exc_inst_list)
        & ~portfolio.identifier.isin(exc_inst_list2))
portfolio = portfolio[keep]

all_positions = portfolio.groupby(['periodenddate', 'identifier'])['endbooknav'].sum()
num_pos = all_positions.groupby('periodenddate').count()
# min/max/mean number of positions
num_pos.min(), num_pos.max(), num_pos.mean()

is_bond = ((portfolio.custacctname == 'V0NSCLMAMB')
           & (portfolio.identifier != 'USD')
           & (portfolio.endqty != 0)
           & portfolio.port.isin(['MORTGAGES', 'STRUCTURED', 'CLO'])
           & ~portfolio.strat.isin(['MBSCDS']))
bonds = portfolio[is_bond]

# Keep only the rows stamped with the last date observed in each calendar month.
monthend_bonds = bonds.groupby(pd.Grouper(freq="M"), group_keys=False).apply(
    lambda g: g.loc[g.index[-1]]
)
monthend_bonds = monthend_bonds.groupby(['periodenddate', 'identifier']).sum()
nav.index.rename('periodenddate', inplace=True)
monthend_bonds = monthend_bonds.merge(nav, left_index=True, right_index=True,
                                      suffixes=('_bond', '_fund'))
monthend_bonds['percentage'] = monthend_bonds.endbooknav_bond / monthend_bonds.endbooknav_fund
last_date = monthend_bonds.index.get_level_values(0).max()
latest = monthend_bonds.loc[last_date]
# min/max/mean position size (near-zero residual positions excluded from the min)
latest['percentage'][latest['percentage'] > 0.0000001].min(), latest['percentage'].max(), latest['percentage'].mean()
# sum of the ten largest positions per month-end
ten_largest = monthend_bonds.groupby('periodenddate').apply(lambda g: g['percentage'].nlargest(10).sum())
ten_largest.min(), ten_largest.max(), ten_largest.mean()
go.get_net_navs()\n", "sql_string = \"SELECT * FROM bonds where fund='SERCGMAST'\"\n", "bond_trades = pd.read_sql_query(sql_string, dawn_engine,\n", " parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n", " index_col = 'trade_date')\n", "g = bond_trades['principal_payment'].groupby(pd.Grouper(freq='M'))\n", "#min/max/mean bond trades by count (weekly = /4)\n", "g.count().min()/4, g.count().max()/4, g.count().mean()/4\n", "#min/max/mean bond trades by MV (weekly = /4)\n", "volume = g.sum()/nav.endbooknav\n", "volume.min()/4, volume.max()/4, volume.mean()/4\n", "\n", "sql_string = \"SELECT * FROM cds where fund='SERCGMAST'\"\n", "cds_trades = pd.read_sql_query(sql_string, dawn_engine,\n", " parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n", " index_col = 'trade_date')\n", "g = cds_trades['notional'].groupby(pd.Grouper(freq='M'))\n", "#min/max/mean cds trades by count\n", "g.count().min()/4, g.count().max()/4, g.count().mean()/4\n", "#min/max/mean cds trades by notional\n", "volume = g.sum()/nav.endbooknav\n", "volume.fillna(0, inplace=True)\n", "volume.min(), volume.max()/4, volume.mean()/4" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################### Average Holding Period\n", "#Time series of bond portfolio age (portfolio date - latest buy date of position) - weighted by MV of all bonds.\n", "#Problem is if we buy the same position again it resets to the holding period to 0\n", "nav = go.get_net_navs()\n", "sql_string = \"SELECT * FROM bonds where fund = 'SERCGMAST' order by trade_date desc\"\n", "df = pd.read_sql_query(sql_string, dawn_engine,\n", " parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n", " index_col = 'trade_date')\n", "buys = df[df.buysell == True].sort_index()\n", "buys['buy_date'] = buys.index\n", "#get portfolio \n", "port = go.get_portfolio()\n", "port.sort_index(inplace=True)\n", "buy_dates 
= pd.merge_asof(port, buys[['buy_date', 'identifier']], left_index=True, right_index=True,by='identifier', direction='backward')\n", "buy_dates = buy_dates[['identifier', 'endbooknav','buy_date']][~buy_dates.buy_date.isna()]\n", "buy_dates['hold_days'] = (buy_dates.index - buy_dates.buy_date)/np.timedelta64(1, 'D')\n", "def weighted_average(df):\n", " return np.average(df.hold_days,weights=df.endbooknav)\n", "hold_period = buy_dates.groupby('periodenddate').apply(func = weighted_average)\n", "hold_period_last_five = hold_period.loc[datetime.date.today()- datetime.timedelta(weeks=52*5)::]\n", "hold_period_last_five.min(), hold_period_last_five.max(), hold_period_last_five.mean()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################## Calculate Historical Bond Duration/Yield\n", "analytics.init_ontr()\n", "mysql_engine = dbengine('rmbs_model')\n", "dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"M\")\n", "calc_df = pd.DataFrame()\n", "sql_string = (\"SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 \"\n", " \"and date(timestamp) < %s and date(timestamp) > %s order by timestamp desc\")\n", "with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n", " for d in dates:\n", " timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=[\"timestamp\"], params=[d, d - pd.tseries.offsets.DateOffset(15, \"D\")])\n", " calc_df = calc_df.append(subprime_risk(d.date(), dawnconn, mysql_engine, timestamps.iloc[0,0].date()))\n", "calc_df=calc_df.reset_index().set_index('date')\n", "calc_df = calc_df.dropna(subset=['bond_yield', 'hy_equiv']) \n", "bond_stats = pd.DataFrame()\n", "for d, g in calc_df.groupby(pd.Grouper(freq='M')):\n", " bond_stats.loc[d, 'dur'] = sum(g.notional * g.factor * g.modDur)/sum(g.notional * g.factor)\n", " bond_stats.loc[d, 'yield'] = sum(g.usd_market_value * g.modDur 
* g.bond_yield) /sum(g.usd_market_value * g.modDur)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################## Leverage Ratio\n", "nav = go.get_net_navs()\n", "portfolio = go.get_portfolio()\n", "monthend_portfolio = portfolio.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n", " lambda df: df.loc[df.index[-1]]\n", " )\n", "monthend_byinvid = monthend_portfolio.groupby(['periodenddate','invid']).sum()\n", "positive = monthend_byinvid['endbooknav'].groupby(['periodenddate']).agg(lambda x: x[x>0].sum())\n", "nav = nav.merge(positive, left_index=True, right_index=True)\n", "nav['leverage'] = nav.endbooknav_y/nav.endbooknav_x\n", "nav['leverage'].plot()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "################################## FX Exposure, any net CAD/EUR exposures are FX exposure. \n", "################### doesn't add up to 1 including the USD as we now sum up all the NAVs after adjusting the Futures\n", "nav = go.get_net_navs()\n", "portfolio = go.get_portfolio()\n", "monthend_portfolio = portfolio.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n", " lambda df: df.loc[df.index[-1]]\n", " )\n", "#Adjust the endbooknav of futures\n", "tickers = ['CD_CME', 'EC_CME']\n", "factors = [100000, 125000]\n", "currency = ['CAD', 'EUR']\n", "for a, b, c in zip(tickers, factors, currency):\n", " new_endbooknav = monthend_portfolio['endqty'] * monthend_portfolio['endlocalmarketprice'] * b\n", " monthend_portfolio['endbooknav'] = new_endbooknav.where(monthend_portfolio['invid'].str.contains(a), monthend_portfolio['endbooknav'])\n", " monthend_portfolio.loc[monthend_portfolio['invid'].str.contains(a), 'invccy'] = c\n", "\n", "monthend_portfolio = monthend_portfolio.merge(nav, left_index=True, right_index=True, suffixes=('_inst', '_fund'))\n", "monthend_portfolio.index.name = 'periodenddate'\n", "monthend_portfolio['percent_nav'] = 
monthend_portfolio['percent_nav'] = monthend_portfolio['endbooknav_inst'] / monthend_portfolio['endbooknav_fund']

# Bucket the book into collateral / futures / cash / everything-else and report
# each bucket's share of NAV per currency.
collateral_filter = monthend_portfolio['invid'].str.contains('LOAN')
futures_filter = monthend_portfolio['invid'].str.contains('|'.join(tickers))
cash_filter = ((monthend_portfolio['invid'] == 'CAD') | (monthend_portfolio['invid'] == 'EUR'))
trades = monthend_portfolio[(~futures_filter) & (~collateral_filter) & (~cash_filter)]
names = ['collateral', 'futures', 'cash', 'trades']
categories = [monthend_portfolio[collateral_filter],
              monthend_portfolio[futures_filter],
              monthend_portfolio[cash_filter],
              trades]
exposure = {}
for n, x in zip(names, categories):
    exposure[n] = x.groupby(['periodenddate', 'invccy']).sum()
exposure = pd.concat(exposure)['percent_nav']
exposure.unstack(level=1).T.to_clipboard()

# --- Historical notionals and HY equivalents ---------------------------------
# NOTE(review): build_portf is defined in the NEXT cell — run that cell first
# (or move the definition above this one) so Restart & Run All succeeds.
# BUG FIX: pd.datetime was removed from pandas — use datetime.datetime instead.
dates = pd.date_range(datetime.date(2013, 1, 30), datetime.datetime.today() - MonthEnd(1), freq="BM")
portfs = {}
for d in dates:
    d = d.date()
    portfs[d] = build_portf(d)

def build_portf(position_date, spread_date=None):
    """Build the full synthetic + bond-equivalent portfolio as of position_date.

    Tranche, swaption and index-CDS positions are loaded as of position_date;
    the portfolio is then marked as of spread_date (defaults to position_date).
    Bond books (RMBS/CRT and CLO) enter as short HY on-the-run index positions
    sized to their HY-equivalent notionals.

    NOTE(review): relies on `on_the_run`, which is imported two cells below —
    confirm import order before a fresh-kernel run.
    """
    if spread_date is None:
        spread_date = position_date
    conn = dawn_engine.raw_connection()
    mysql_engine = dbengine('rmbs_model')
    mysqlcrt_engine = dbengine('crt')

    on_the_run_index = CreditIndex('HY', on_the_run('HY', position_date), '5yr', value_date=position_date)

    # tranche positions
    portf = get_tranche_portfolio(position_date, conn, False, 'SERCGMAST')
    # swaption positions
    s_portf = get_swaption_portfolio(position_date, conn)
    if bool(s_portf):
        for t, id in zip(s_portf.trades, s_portf.trade_ids):
            portf.add_trade(t, id)

    # index positions
    df = pd.read_sql_query("SELECT * from list_cds_positions_by_strat(%s)",
                           dawn_engine, params=(position_date,))
    if not df.empty:
        df_no_curve = df[~df.folder.str.contains("CURVE")]
        for t in df_no_curve.itertuples(index=False):
            portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),
                            (t.folder, t.security_desc))

        # separately add in curve delta: collapse curve trades into a single
        # HY-equivalent on-the-run position
        df_curve = df[df.folder.str.contains("CURVE")]
        curve_portf = Portfolio([CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional)
                                 for t in df_curve.itertuples(index=False)])
        curve_portf.value_date = spread_date
        curve_portf.mark()

        hyontr = deepcopy(on_the_run_index)
        hyontr.notional = curve_portf.hy_equiv
        portf.add_trade(hyontr, ('curve_trades', ''))

    # bond risks: use the latest pricing run at most 15 days before position_date
    sql_string = ("SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 "
                  "and date(timestamp) < %s and date(timestamp) > %s order by timestamp desc")
    with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:
        # BUG FIX: DateOffset(15, "D") passed "D" into the `normalize` slot;
        # days=15 is the intent.
        timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=["timestamp"],
                                       params=[position_date, position_date - pd.tseries.offsets.DateOffset(days=15)])
        # BUG FIX: .iloc[0][0] is chained indexing; .iloc[0, 0] is the direct
        # scalar access (matches the usage in the duration cell above).
        rmbs_pos = subprime_risk(position_date, dawnconn, mysql_engine, timestamps.iloc[0, 0].date())
        clo_pos = clo_risk(position_date, dawnconn, etconn)
        crt_pos = crt_risk(position_date, dawnconn, mysqlcrt_engine)
    rmbs_notional = 0
    for pos in [rmbs_pos, crt_pos]:
        rmbs_notional += pos['hy_equiv'].sum() if pos is not None else 0
    hyontr_rmbs = deepcopy(on_the_run_index)
    hyontr_rmbs.notional = -rmbs_notional
    portf.add_trade(hyontr_rmbs, ('rmbs_bonds', ''))
    if isinstance(clo_pos, pd.DataFrame):
        hyontr_clos = deepcopy(on_the_run_index)
        hyontr_clos.notional = -clo_pos['hy_equiv'].sum()
        portf.add_trade(hyontr_clos, ('clo_bonds', ''))

    portf.value_date = spread_date
    portf.mark(interp_method="bivariate_linear")
    portf.reset_pv()

    return portf

# --- Stress scenario setup ---------------------------------------------------
position_date = (datetime.date.today() - BDay(1)).date()
spread_date = position_date
#analytics.init_ontr(spread_date)

# Size the stress: largest spread moves over ~1/3/6 months across the ten most
# recent HY series.
from analytics.curve_trades import on_the_run
df = get_index_quotes('HY', list(range(on_the_run('HY', spread_date) - 10, on_the_run('HY', spread_date) + 1)),
                      tenor=['5yr'], years=5)
df = df.xs('5yr', level='tenor')['close_spread'].groupby(['date', 'series']).last()

widen, tighten = [], []
# approximately 1, 3, 6 month moves (22 business days per month)
for days in [22, 66, 132]:
    calc = df.unstack().pct_change(freq=str(days) + 'B').stack().groupby('date').last()
    widen.append(calc.max())
    tighten.append(calc.min())
pd.DataFrame([widen, tighten], columns=['1M', '3M', '6M'], index=['widen', 'tighten'])

# tranche/swaption positions
portf = build_portf(position_date, spread_date)

vol_surface = {}
# Pick the latest GS vol surface per (index, series, option type); fall back to
# the next series when no surface exists for the traded one.
for trade in portf.swaptions:
    # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    # catch Exception only.
    try:
        vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series,
                                     value_date=spread_date, interp_method="bivariate_linear")
    except Exception:
        vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series + 1,
                                     value_date=spread_date, interp_method="bivariate_linear")
    vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = \
        vs[vs.list(source='GS', option_type=trade.option_type)[-1]]

scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=["pnl"],
                                spread_shock=widen,
                                vol_shock=[0],
                                corr_shock=[0],
                                vol_surface=vol_surface)

# Aggregate per top-level strategy folder. BUG FIX: groupby(..., axis=1) is
# deprecated/removed in recent pandas — transpose, group the index, transpose back.
attrib = (scens
          .reset_index(level=['date'], drop=True)
          .T.groupby(level=0).sum().T)
results = attrib.xs((widen[2], 0., 0.), level=['spread_shock', 'corr_shock', 'vol_shock']).T

results.to_clipboard(header=True)

# --- Run a grid of spread scenarios ------------------------------------------
spread_shock = np.round(np.arange(-.2, 1, .05), 3)
# BUG FIX: `date_range` and `vol_shock` were undefined anywhere in this
# notebook; default them to the same single-date / zero-vol settings used in
# the attribution run above so Restart & Run All succeeds.
date_range = [pd.Timestamp(spread_date)]
vol_shock = [0]
scens = run_portfolio_scenarios(portf, date_range, params=['pnl', 'delta'],
                                spread_shock=spread_shock,
                                vol_shock=vol_shock,
                                corr_shock=[0],
                                vol_surface=vol_surface)

pnl = scens.xs('pnl', axis=1, level=2)
pnl = pnl.xs((0, 0), level=['vol_shock', 'corr_shock'])

scenarios = (pnl
             .reset_index(level=['date'], drop=True)
             .T.groupby(level=0).sum().T)

options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']
# (duplicate 'IGINX' entry removed — it was redundant under the set intersection)
tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'BSPK', 'XOMEZ', 'XOINX']

# BUG FIX: indexing a DataFrame with a set is disallowed in modern pandas — use a list.
scenarios['options'] = scenarios[list(set(scenarios.columns).intersection(options))].sum(axis=1)
scenarios['tranches'] = scenarios[list(set(scenarios.columns).intersection(tranches))].sum(axis=1)

# BUG FIX: .copy() avoids SettingWithCopyWarning when adding the total column
# to a slice of `scenarios`.
synthetic = scenarios[['options', 'tranches', 'curve_trades']].copy()
synthetic['total'] = synthetic.sum(axis=1)
nav = go.get_net_navs()
# BUG FIX: positional [] indexing on a labeled Series is deprecated — use .iloc.
(synthetic / nav.endbooknav.iloc[-1]).plot()
"code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.4" } }, "nbformat": 4, "nbformat_minor": 4 }