Diffstat (limited to 'python/notebooks')
-rw-r--r--  python/notebooks/Allocation Reports.ipynb |  9
-rw-r--r--  python/notebooks/Reto Report.ipynb        | 46
-rw-r--r--  python/notebooks/VaR.ipynb                | 37
3 files changed, 62 insertions, 30 deletions
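Both Reto Report.ipynb and VaR.ipynb in the diff below move from a single hard-coded date variable to an explicit position_date (previous business day) and shock_date (two business days back). A minimal, self-contained sketch of that date arithmetic, using only pandas as the notebooks do; nothing beyond the two offsets visible in the diff is assumed:

    import datetime
    import pandas as pd

    # Previous business day for positions, two business days back for the shock run,
    # mirroring the cell added to Reto Report.ipynb.
    position_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()
    shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()
    print(position_date, shock_date)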
diff --git a/python/notebooks/Allocation Reports.ipynb b/python/notebooks/Allocation Reports.ipynb
index b20f77d0..e64b84b0 100644
--- a/python/notebooks/Allocation Reports.ipynb
+++ b/python/notebooks/Allocation Reports.ipynb
@@ -245,8 +245,8 @@
    "outputs": [],
    "source": [
     "#Positions and Risks\n",
-    "rmbs_pos = go.get_rmbs_pos_df()\n",
-    "clo_pos = go.get_clo_pos_df()"
+    "rmbs_pos = go.hist_pos(asset_class = 'rmbs')\n",
+    "clo_pos = go.hist_pos(asset_class = 'clo')"
    ]
   },
   {
@@ -302,8 +302,9 @@
     "#RMBS Risk - need RMBS Positions and Risks\n",
     "sql_string = \"select date, duration, series from on_the_run where index = 'HY'\"\n",
     "duration = pd.read_sql_query(sql_string, Sengine, parse_dates=['date'], index_col=['date'])\n",
-    "df = pd.merge_asof(rmbs_pos.sort_index(), duration, left_index=True, right_index=True)\n",
-    "rmbs_hy_equiv = df.groupby('timestamp').apply(lambda df: sum(df.delta_yield/df.duration * 100))\n",
+    "rmbs_pos = pd.merge_asof(rmbs_pos.sort_index(), duration, left_index=True, right_index=True)\n",
+    "rmbs_pos['hy_equiv'] = rmbs_pos.delta_yield/rmbs_pos.duration * 100\n",
+    "rmbs_pos.groupby('timestamp').sum()\n",
     "#hy_equiv.plot()"
    ]
   },
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 6e62b252..3b97de27 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -123,6 +123,17 @@
   "metadata": {},
   "outputs": [],
   "source": [
+    "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n",
+    "shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()\n",
+    "(position_date, shock_date)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "#Current tranche and swaptions positions\n",
     "t_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
     "                \"OVER (partition by security_id, attach) AS ntl_agg \"\n",
@@ -138,50 +149,55 @@
     "                \"AND trade_date <= %s\")\n",
     "with conn.cursor() as c:\n",
     "    #Get Tranche Trade Ids\n",
-    "    c.execute(t_sql_string, (date,))\n",
+    "    c.execute(t_sql_string, (position_date,))\n",
     "    t_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
     "    #Get Swaption Trade Ids\n",
-    "    c.execute(swaption_sql_string, (date, date))\n",
+    "    c.execute(swaption_sql_string, (position_date, position_date))\n",
     "    swaption_trades = c.fetchall()\n",
     "    #Get Index/deltas Trade Ids\n",
-    "    c.execute(index_sql_string, (date,))\n",
+    "    c.execute(index_sql_string, (position_date,))\n",
     "    index_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
     "    \n",
     "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n",
-    "                  t_trade_ids)\n",
+    "                  ['trn_'+ str(a) for a in t_trade_ids])\n",
     "for row in swaption_trades:\n",
-    "    option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', date)\n",
+    "    option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n",
     "    option_delta.mark()\n",
     "    portf.add_trade(BlackSwaption.from_tradeid(row[0], option_delta), 'opt_' + str(row[0]))\n",
     "for index_id in index_trade_ids:\n",
     "    portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
     "    \n",
-    "#Update manually - positive notional = long risk\n",
-    "non_trancheSwap_risk_notional = 49119912 \n",
-    "\n",
-    "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = date, notional = -non_trancheSwap_risk_notional), 'bond')\n",
+    "#get bond risks:\n",
+    "rmbs_pos = go.rmbs_pos(position_date)\n",
+    "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
+    "                        shock_date)\n",
+    "duration, = next(r)\n",
+    "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
+    "notional = rmbs_pos['hy_equiv'].sum()\n",
+    "notional = 47633776\n",
+    "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = -notional), 'rmbs_bond')\n",
     "    \n",
-    "portf.value_date = date\n",
+    "portf.value_date = shock_date\n",
     "portf.mark(interp_method=\"bivariate_linear\")\n",
     "portf.reset_pv()\n",
     "\n",
     "vol_surface = {}\n",
     "for trade in portf.swaptions:\n",
     "    vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
-    "                                 value_date=date, interp_method = \"bivariate_linear\")\n",
+    "                                 value_date=shock_date, interp_method = \"bivariate_linear\")\n",
     "    vol_surface[trade.index.index_type + trade.index.series] = vs[vs.list(option_type='payer')[-1]]\n",
     "vol_shock = [0]\n",
     "corr_shock = [0]\n",
-    "spread_shock = widen + tighten\n",
-    "date_range = [pd.Timestamp(date)]\n",
+    "spread_shock = tighten + [0] + widen\n",
+    "date_range = [pd.Timestamp(shock_date)]\n",
     "\n",
-    "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
+    "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\", \"hy_equiv\"],\n",
     "                                spread_shock=spread_shock,\n",
     "                                vol_shock=vol_shock,\n",
     "                                corr_shock=corr_shock,\n",
     "                                vol_surface=vol_surface)\n",
     "\n",
-    "scens.sum(axis=1)"
+    "scens.xs('pnl', level=1, axis=1).sum(axis=1)"
    ]
   },
   {
diff --git a/python/notebooks/VaR.ipynb b/python/notebooks/VaR.ipynb
index de2c26a1..ba59d717 100644
--- a/python/notebooks/VaR.ipynb
+++ b/python/notebooks/VaR.ipynb
@@ -16,6 +16,7 @@
     "import exploration.VaR as var\n",
     "import pandas as pd\n",
     "import numpy as np\n",
+    "import globeop_reports as go\n",
     "\n",
     "conn = dbconn('dawndb')\n",
     "dawndb = dbengine('dawndb')\n",
@@ -84,10 +85,10 @@
    "outputs": [],
    "source": [
     "#Import the IM at the FCM account: calculate the IM share of different strategies as a share of VaR\n",
-    "filename = date.strftime('%Y%m%d') + \"_OTC_MARGIN.csv\"\n",
-    "margin_df = pd.read_csv(\"/home/serenitas/Daily/SG_reports/\" + filename, index_col='System Currency')\n",
-    "mortg_hedge_im = mort_hedge_var + mort_hedge_var/(mort_hedge_var + ig_curve_var) * margin_df.loc[('USD', 'SG Settlement Margin')]\n",
-    "mortg_hedge_im"
+    "#filename = date.strftime('%Y%m%d') + \"_OTC_MARGIN.csv\"\n",
+    "#margin_df = pd.read_csv(\"/home/serenitas/Daily/SG_reports/\" + filename, index_col='System Currency')\n",
+    "#mortg_hedge_im = mort_hedge_var + mort_hedge_var/(mort_hedge_var + ig_curve_var) * margin_df.loc[('USD', 'SG Settlement Margin')]\n",
+    "#mortg_hedge_im"
    ]
   },
   {
@@ -163,7 +164,7 @@
     "    index_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
     "    \n",
     "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n",
-    "                  t_trade_ids)\n",
+    "                  ['trn_'+ str(a) for a in t_trade_ids])\n",
     "for row in swaption_trades:\n",
     "    option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n",
     "    option_delta.mark()\n",
@@ -171,9 +172,15 @@ "    portf.add_trade(BlackSwaption.from_tradeid(row[0], option_delta), 'opt_' + str(row[0]))\n",
     "for index_id in index_trade_ids:\n",
     "    portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
     "    \n",
-    "#Update manually - positive notional = long risk\n",
-    "non_trancheSwap_risk_notional = 49119912 \n",
-    "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = -non_trancheSwap_risk_notional), 'bond')\n",
+    "#get bond risks:\n",
+    "rmbs_pos = go.rmbs_pos(position_date)\n",
+    "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
+    "                        shock_date)\n",
+    "duration, = next(r)\n",
+    "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
+    "notional\n",
+    "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, \n",
+    "                            notional = rmbs_pos['hy_equiv'].sum()), 'rmbs_bond')\n",
     "    \n",
     "portf.value_date = shock_date\n",
     "portf.mark(interp_method=\"bivariate_linear\")\n",
@@ -195,7 +202,7 @@
     "                                corr_shock=corr_shock,\n",
     "                                vol_surface=vol_surface)\n",
     "\n",
-    "scens.xs('pnl', level=1).sum(axis=1)"
+    "scens.xs('pnl', level=1, axis=1).sum(axis=1)"
    ]
   },
   {
@@ -205,12 +212,20 @@
    "outputs": [],
    "source": [
     "spread_shock = np.arange(-.4, 2.2, .2)\n",
-    "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
+    "\n",
+    "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\", \"hy_equiv\"],\n",
     "                                spread_shock=spread_shock,\n",
     "                                vol_shock=vol_shock,\n",
     "                                corr_shock=corr_shock,\n",
     "                                vol_surface=vol_surface)\n",
-    "scens.sum(axis=1)\n",
+    "results = {}\n",
+    "for x in ['pnl', 'hy_equiv']:\n",
+    "    df = scens.xs(x, level=1, axis=1)\n",
+    "    for y in ['trn', 'opt', 'index']:\n",
+    "        columns = [col for col in df.columns if 'trn' in col]\n",
+    "        results[(x,y)] = df[columns].sum(axis=1)\n",
+    "    \n",
+    "hy_equiv = scens.xs('hy_equiv', level=1, axis=1).sum(axis=1)\n",
     "\n",
     "#risk_notional = [t.notional * t._index.duration for t in portf.indices]\n",
     "#portf.trades[0]._index.duration()"
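The common thread across all three notebooks is how the RMBS bond hedge notional is obtained: instead of a manually updated non_trancheSwap_risk_notional, each position's yield delta is converted into an on-the-run HY index equivalent (hy_equiv = delta_yield / duration * 100) and the sum sizes the offsetting CreditIndex trade. A rough standalone sketch of that conversion; the positions frame and the duration value are made-up stand-ins for go.rmbs_pos() and the on_the_run table, which are internal and not reproduced here:

    import pandas as pd

    # Hypothetical stand-in for go.rmbs_pos(position_date); only delta_yield matters here.
    rmbs_pos = pd.DataFrame(
        {"delta_yield": [12500.0, -3200.0, 8900.0]},
        index=["bond_a", "bond_b", "bond_c"],
    )

    # Assumed on-the-run HY 5yr duration; the notebooks read it from the
    # on_the_run table for shock_date instead.
    hy_duration = 4.3

    # HY-equivalent notional per position, summed to size the index hedge.
    rmbs_pos["hy_equiv"] = rmbs_pos["delta_yield"] / hy_duration * 100
    hedge_notional = rmbs_pos["hy_equiv"].sum()
    print(f"HY-equivalent hedge notional: {hedge_notional:,.0f}")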

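The scenario output is sliced with xs(..., level=1, axis=1), which implies scens has two column levels: trade name (prefixed trn_, opt_ or index_) and parameter ('pnl' or 'hy_equiv'). A sketch of splitting such a frame per parameter and per trade-name prefix; the frame below is fabricated for illustration, and the prefix filter uses startswith(prefix) rather than the literal 'trn' test that appears in the committed VaR cell:

    import numpy as np
    import pandas as pd

    # Fabricated stand-in for the run_portfolio_scenarios output:
    # rows are spread shocks, columns are (trade_name, param) pairs.
    shocks = [-0.2, 0.0, 0.2]
    columns = pd.MultiIndex.from_product(
        [["trn_1", "trn_2", "opt_7", "index_3"], ["pnl", "hy_equiv"]]
    )
    scens = pd.DataFrame(
        np.random.default_rng(0).normal(size=(len(shocks), len(columns))),
        index=shocks,
        columns=columns,
    )

    # Total P&L across all trades for each shock, as in the notebooks.
    total_pnl = scens.xs("pnl", level=1, axis=1).sum(axis=1)

    # Per-parameter, per-strategy sub-totals keyed by trade-name prefix.
    results = {}
    for param in ["pnl", "hy_equiv"]:
        df = scens.xs(param, level=1, axis=1)
        for prefix in ["trn", "opt", "index"]:
            cols = [c for c in df.columns if c.startswith(prefix)]
            results[(param, prefix)] = df[cols].sum(axis=1)

    print(total_pnl)
    print(results[("pnl", "trn")])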