diff options
| -rw-r--r-- | python/analytics/scenarios.py | 1 | ||||
| -rw-r--r-- | python/graphics.py | 74 | ||||
| -rw-r--r-- | python/notebooks/Option Trades.ipynb | 153 | ||||
| -rw-r--r-- | python/notebooks/Valuation Backtest.ipynb | 15 |
4 files changed, 140 insertions, 103 deletions
diff --git a/python/analytics/scenarios.py b/python/analytics/scenarios.py index 1ebf9371..037f5d9b 100644 --- a/python/analytics/scenarios.py +++ b/python/analytics/scenarios.py @@ -120,6 +120,7 @@ def run_portfolio_scenarios(portf, date_range, params=["pnl"], **kwargs): if nproc > 0 run with nproc processes. """ d = {} + portf = deepcopy(portf) for date in date_range: portf.value_date = date.date() d[date] = join_dfs(portf.shock(params, **kwargs)) diff --git a/python/graphics.py b/python/graphics.py index 75740081..9fa5a1ac 100644 --- a/python/graphics.py +++ b/python/graphics.py @@ -58,62 +58,38 @@ def shiftedColorMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'): return newcmap -def plot_time_color_map(df, spread_shock, attr="pnl", path=".", color_map=cm.RdYlGn, index='IG', centered = True): - - val_date = df.index[0].date() - df = df.reset_index() - df['days'] = (df['date'].dt.date - val_date).dt.days - ascending = [True,True] if index == 'HY' else [True,False] - df.sort_values(by=['date','spread'], ascending = ascending, inplace = True) - date_range = df.days.unique() - - #plt.style.use('seaborn-whitegrid') - fig, ax = plt.subplots() - series = df[attr] - if centered is True: - midpoint = 1 - series.max() / (series.max() + abs(series.min())) - shifted_cmap = shiftedColorMap(color_map, midpoint=midpoint, name='shifted') - else: - shifted_cmap = color_map - - chart = ax.imshow(series.values.reshape(date_range.size, spread_shock.size).T, - extent=(date_range.min(), date_range.max(), - spread_shock.min(), spread_shock.max()), - aspect='auto', interpolation='bilinear', cmap=shifted_cmap) - - #chart = ax.contour(date_range, spread_shock, series.values.reshape(date_range.size, spread_shock.size).T) +def plot_color_map(series, sort_order = [True,True], color_map=cm.RdYlGn, centered = True): + ''' + 2D heat map - if x-axis is time translate to days instead - ax.set_xlabel('Days') - ax.set_ylabel('Price') if index == 'HY' else ax.set_ylabel('Spread') - 
ax.set_title('{} of Trade'.format(attr.title())) + Parameters + ----- + series: Series with multilevel index (x: first level, y: second level) + sort_order: sorting in the x,y axis + color_map: default Red-Yellow-Green + centered: center yellow as 0 of the series. + ''' - fig.colorbar(chart, shrink=.8) - #fig.savefig(os.path.join(path, "spread_time_color_map_"+ attr+ "_{}.png".format(val_date))) + x = series.index.get_level_values(0) + y = series.index.get_level_values(1) + if x.dtype == '<M8[ns]': + x = (x - x[0]).days + x.name = 'Days' + series.sort_index(ascending = sort_order, inplace = True) -def plot_color_map(df, spread_shock, vol_shock, attr="pnl", path=".", index='IG'): - # TODO: merge with plot_time_color_map - val_date = df.index[0].date() - #rows are spread, columns are volatility surface shift fig, ax = plt.subplots() - #We are plotting an image, so we have to sort from high to low on the Y axis - ascending = [False,False] if index == 'HY' else [True,False] - df.sort_values(by=['spread','vol_shock'], ascending=ascending, inplace=True) - series = df[attr] - - midpoint = 1 - series.max() / (series.max() + abs(series.min())) - shifted_cmap = shiftedColorMap(cm.RdYlGn, midpoint=midpoint, name='shifted') - - chart = ax.imshow(series.values.reshape(spread_shock.size, vol_shock.size).T, - extent=(spread_shock.min(), spread_shock.max(), - vol_shock.min(), vol_shock.max()), - aspect='auto', interpolation='bilinear', cmap=shifted_cmap) + midpoint = 1 - series.max() / (series.max() \ + + abs(series.min())) if centered is True else 0.5 + color_map = shiftedColorMap(color_map, midpoint=midpoint) - ax.set_xlabel('Price') if index == 'HY' else ax.set_xlabel('Spread') - ax.set_ylabel('Volatility shock') - ax.set_title('{} of Trade on {}'.format(attr.title(), val_date)) + chart = ax.imshow(series.values.reshape(x.unique().size, y.unique().size).T, + extent=(x.min(), x.max(), y.min(), y.max()), + aspect='auto', interpolation='bilinear', cmap=color_map) + 
ax.set_xlabel(x.name) + ax.set_ylabel(y.name) + ax.set_title('{} of Trade'.format(series.name)) fig.colorbar(chart, shrink=.8) - #fig.savefig(os.path.join(path, "vol_spread_color_map"+ attr+ "_{}.png".format(val_date))) def plot_prob_map(df, attr="pnl", path=".", color_map=cm.RdYlGn, index='IG'): diff --git a/python/notebooks/Option Trades.ipynb b/python/notebooks/Option Trades.ipynb index 53ceb04c..d43d86ee 100644 --- a/python/notebooks/Option Trades.ipynb +++ b/python/notebooks/Option Trades.ipynb @@ -10,9 +10,9 @@ "import pandas as pd\n", "import numpy as np\n", "\n", - "from graphics import plot_time_color_map, plot_color_map\n", - "from analytics import Swaption, BlackSwaption, BlackSwaptionVolSurface, Index, Portfolio\n", - "from analytics.scenarios import run_swaption_scenarios, run_index_scenarios, run_portfolio_scenarios\n", + "from graphics import plot_color_map\n", + "from analytics import Swaption, BlackSwaption, BlackSwaptionVolSurface, CreditIndex, Portfolio\n", + "from analytics.scenarios import run_swaption_scenarios, run_index_scenarios, run_portfolio_scenarios, run_portfolio_scenarios_module\n", "from scipy.interpolate import SmoothBivariateSpline\n", "from db import dbengine" ] @@ -32,29 +32,48 @@ "metadata": {}, "outputs": [], "source": [ - "def plot_trade_scenarios(portf, shock_min=-.15, shock_max=.2, period=-1, vol_time_roll=True):\n", + "def plot_trade_scenarios(portf, shock_min=-.15, shock_max=.2, vol_time_roll=True):\n", " portf.reset_pv()\n", - " earliest_date = min(portf.swaptions, key=lambda x: x.exercise_date).exercise_date\n", - " date_range = pd.bdate_range(portf.indices[0].value_date,\n", - " earliest_date - pd.tseries.offsets.BDay(), freq='3B')\n", - " vol_shock = np.arange(-0.15, 0.3, 0.01)\n", + " end_date = min(portf.swaptions, key=lambda x: x.exercise_date).exercise_date\n", + " date_range = pd.bdate_range(portf.value_date,\n", + " end_date - pd.tseries.offsets.BDay(), freq='3B')\n", + " vol_shock = np.arange(-.15, .31, 
0.01)\n", "    spread_shock = np.arange(shock_min, shock_max, 0.01)\n", - "    index = portf.indices[0].name.split()[1]\n", - "    series = portf.indices[0].name.split()[3][1:]\n", - "    vs = BlackSwaptionVolSurface(index, series, value_date=portf.indices[0].value_date)\n", + "    index = portf.indices[0].index_type\n", + "    vs = BlackSwaptionVolSurface(index, portf.indices[0].series, \n", + "                                 value_date=portf.value_date)\n", "    vol_surface = vs[vs.list(option_type='payer')[-1]]\n", + "    \n", + "    df = run_portfolio_scenarios(portf, date_range, params=[\"pnl\",\"delta\"],\n", + "                                 spread_shock = spread_shock,\n", + "                                 vol_shock = vol_shock,\n", + "                                 vol_surface = vol_surface)\n", + "    df = df.reset_index()\n", + "    df.vol_shock = df.vol_shock.round(2)\n", "\n", - "    df = run_portfolio_scenarios(portf, date_range, spread_shock, vol_shock, vol_surface,\n", - "                                 params=[\"pnl\",\"delta\"])\n", - "\n", - "    hy_plot_range = 100 + (500 - portf.indices[0].spread * (1 + spread_shock)) * \\\n", - "                    abs(portf.indices[0].DV01) / portf.indices[0].notional * 100\n", + "    if index == 'HY':\n", + "        df['price'] = 100 + (500 - portf.indices[0].spread * (1 + df.spread_shock)) \\\n", + "                      * abs(portf.indices[0].DV01) / portf.indices[0].notional * 100\n", + "        df = df.set_index(['date', 'price', 'vol_shock'])\n", + "        sort_order = [True, False]\n", + "    else:\n", + "        df['spread'] = portf.indices[0].spread * (1 + df.spread_shock)\n", + "        df = df.set_index(['date', 'spread', 'vol_shock'])\n", + "        sort_order = [True, True]\n", + "    \n", + "    pnl = df.xs('pnl', axis=1, level=1).sum(axis=1)\n", + "    for trade_id, t in portf.items():\n", + "        if isinstance(t, BlackSwaption):\n", + "            df[(trade_id, 'delta')] *= -t.notional \n", + "    delta = df.xs('delta', axis=1, level=1).sum(axis=1).xs(0, level='vol_shock')\n", + "    delta += sum([x.notional * -1 if x.direction == 'Buyer' else 1 for x in portf.indices])\n", "\n", - "    shock = hy_plot_range if index == 'HY' else portf.indices[0].spread * (1 + spread_shock)\n", "\n", + "    pnl.name = 
'pnl'\n", + " delta.name = 'delta'\n", "\n", - " plot_time_color_map(df[round(df.vol_shock,2)==0], shock, 'pnl', index=index)\n", - " plot_time_color_map(df[round(df.vol_shock,2)==.2], shock, 'pnl', index=index)\n", - " plot_color_map(df.loc[date_range[period]], shock, vol_shock, 'pnl', index=index)" + " plot_color_map(pnl.xs(0, level='vol_shock'), sort_order)\n", + " plot_color_map(delta, sort_order)\n", + " plot_color_map(pnl.loc[date_range[-1]], sort_order)" ] }, { @@ -64,16 +83,20 @@ "outputs": [], "source": [ "#Ad hoc\n", - "option_delta = Index.from_name('IG', 30, '5yr', value_date=datetime.date(2018, 5, 17))\n", - "option_delta.spread = 61\n", - "option1 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 60, option_type=\"payer\")\n", - "option2 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 80, option_type=\"payer\")\n", - "option3 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 80, option_type=\"payer\")\n", - "option1.sigma = .381\n", - "option2.sigma = .545\n", + "index = 'HY'\n", + "series = 30\n", + "value_date = datetime.date(2018, 6, 7)\n", + "option_delta = CreditIndex(index, series, '5yr', value_date)\n", + "#option_delta.spread = 66\n", + "option_delta.price = 106.75\n", + "option1 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 103, option_type=\"payer\")\n", + "option2 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 101.5, option_type=\"payer\")\n", + "option3 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 100, option_type=\"payer\")\n", + "option1.sigma = .47\n", + "option2.sigma = .53\n", "option3.sigma = .69\n", - "option1.notional = 100_000_000\n", - "option2.notional = 300_000_000\n", + "option1.notional = 50_000_000\n", + "option2.notional = 100_000_000\n", "option3.notional = 1\n", "option1.direction = 'Long'\n", "option2.direction = 'Short'\n", @@ -82,7 +105,7 @@ "option_delta.notional = option1.notional * option1.delta + option2.notional * option2.delta + option3.notional * 
option3.delta\n", "option_delta.direction = 'Seller' if option_delta.notional > 0 else 'Buyer'\n", "option_delta.notional = abs(option_delta.notional)\n", - "portf = Portfolio([option1, option2, option3, option_delta])\n", + "portf = Portfolio([option1, option2, option3, option_delta], trade_ids=['opt1', 'opt2', 'opt3', 'delta'])\n", "#Plot Scenarios Inputs: Portfolio, spread shock tightening%, spread shock widening%, snapshot period)\n", "portf" ] @@ -93,7 +116,7 @@ "metadata": {}, "outputs": [], "source": [ - "plot_trade_scenarios(portf, -.15, .8, -4, vol_time_roll=False)" + "plot_trade_scenarios(portf, -.15, .8, vol_time_roll=False)" ] }, { @@ -103,44 +126,44 @@ "outputs": [], "source": [ "#Dec Jan 2017 Trade\n", - "option_delta = Index.from_tradeid(864)\n", + "option_delta = CreditIndex.from_tradeid(864)\n", "option1 = BlackSwaption.from_tradeid(3, option_delta)\n", "option2 = BlackSwaption.from_tradeid(4, option_delta)\n", - "portf = Portfolio([option1, option2, option_delta])\n", + "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n", "#plot_trade_scenarios(portf)\n", "\n", "#Feb 2017: Sell May Buy April Calendar Trade\n", - "option_delta = Index.from_tradeid(870)\n", + "option_delta = CreditIndex.from_tradeid(870)\n", "option1 = BlackSwaption.from_tradeid(5, option_delta)\n", "option2 = BlackSwaption.from_tradeid(6, option_delta)\n", - "portf = Portfolio([option1, option2, option_delta])\n", + "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n", "#plot_trade_scenarios(portf)\n", "\n", "#April 2017: Sell May Buy June Calendar Trade\n", - "option_delta = Index.from_tradeid(874)\n", + "option_delta = CreditIndex.from_tradeid(874)\n", "option1 = BlackSwaption.from_tradeid(7, option_delta)\n", "option2 = BlackSwaption.from_tradeid(8, option_delta)\n", - "portf = Portfolio([option1, option2, option_delta])\n", + "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 
'opt2', 'delta'])\n", "#plot_trade_scenarios(portf)\n", "\n", "#June July 2017 Calendar Trade\n", - "option_delta_pf = Index.from_tradeid(874)\n", - "option_delta2_pf = Index.from_tradeid(879)\n", + "option_delta_pf = CreditIndex.from_tradeid(874)\n", + "option_delta2_pf = CreditIndex.from_tradeid(879)\n", "\n", "option1_pf = BlackSwaption.from_tradeid(7, option_delta_pf)\n", "option2_pf = BlackSwaption.from_tradeid(9, option_delta_pf)\n", "option_delta_pf.notional = 50_335_169\n", "\n", - "portf = Portfolio([option1_pf, option2_pf, option_delta_pf])\n", + "portf = Portfolio([option1_pf, option2_pf, option_delta_pf], trade_ids=['opt1', 'opt2', 'delta'])\n", "portf.value_date = datetime.date(2017, 5, 17)\n", "portf.mark()\n", "#plot_trade_scenarios(portf)\n", "\n", "#July 2017: Buy Sept HY payer spread\n", - "option_delta = Index.from_tradeid(891)\n", + "option_delta = CreditIndex.from_tradeid(891)\n", "option1 = BlackSwaption.from_tradeid(10, option_delta)\n", "option2 = BlackSwaption.from_tradeid(11, option_delta)\n", - "portf = Portfolio([option1, option2, option_delta])\n", + "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n", "#plot_trade_scenarios(portf)" ] }, @@ -150,11 +173,16 @@ "metadata": {}, "outputs": [], "source": [ - "#Look at steepness of volatility - 90 days, .75 vs .25 payer deltas\n", + "#Look at steepness of volatility - 30 days, .85 vs .15 payer deltas on HY\n", + "days = 30\n", + "delta1 = .85\n", + "delta2 = .15\n", + "index = 'HY'\n", + "\n", "sql_str = \"select b.quotedate, b.ref, b.ref_id, b.expiry, a.delta_pay, a.vol from \" \\\n", - " \"swaption_quotes a join swaption_ref_quotes b on a.ref_id = b.ref_id and index = 'IG'\"\n", + " \"swaption_quotes a join swaption_ref_quotes b on a.ref_id = b.ref_id and index = %s\"\n", "df = pd.read_sql_query(sql_str, dbengine('serenitasdb'), \n", - " index_col=['quotedate'], parse_dates={'quotedate': {'utc': True}})\n", + " index_col=['quotedate'], 
parse_dates={'quotedate': {'utc': True}}, params=[index])\n", "df['days_expiry'] = (df.expiry - df.index.date).dt.days\n", "r_1 = []\n", "for i, g in df.groupby(pd.Grouper(freq='D', level='quotedate')):\n", @@ -164,7 +192,7 @@ " if len(r) > 0:\n", " temp = np.dstack(r)\n", " f = SmoothBivariateSpline(temp[0][0], temp[0][1], temp[0][2])\n", - " r = (f(90, .75) - f(90, .25))[0][0]\n", + " r = (f(days, delta1) - f(days, delta2))[0][0]\n", " r_1.append([i, r])\n", " else:\n", " pass\n", @@ -177,6 +205,37 @@ "execution_count": null, "metadata": {}, "outputs": [], + "source": [ + "#Current Positions\n", + "#option_delta = CreditIndex.from_tradeid(945)\n", + "index = 'HY'\n", + "series = 30\n", + "option_delta = CreditIndex(index, series, '5yr', value_date=datetime.date(2018, 6, 7))\n", + "#option_delta.spread = 66\n", + "option_delta.price = 106.75\n", + "option_delta.notional = 1\n", + "option1 = BlackSwaption.from_tradeid(14, option_delta)\n", + "option2 = BlackSwaption.from_tradeid(15, option_delta)\n", + "#option3 = BlackSwaption.from_tradeid(16, option_delta)\n", + "#option4 = BlackSwaption.from_tradeid(17, option_delta)\n", + "#portf = Portfolio([option1, option2, option3, option4, option_delta])\n", + "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n", + "portf.mark()\n", + "plot_trade_scenarios(portf, shock_min=-.15, shock_max=.4)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [] } ], @@ -196,7 +255,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.5" + "version": "3.6.6" } }, "nbformat": 4, diff --git a/python/notebooks/Valuation Backtest.ipynb b/python/notebooks/Valuation Backtest.ipynb index 870fb33d..77f709b8 100644 --- a/python/notebooks/Valuation Backtest.ipynb +++ b/python/notebooks/Valuation 
Backtest.ipynb @@ -6,9 +6,9 @@ "metadata": {}, "outputs": [], "source": [ - "from datetime import datetime\n", "from db import dbengine\n", "\n", + "import datetime\n", "import mark_backtest_underpar as mark\n", "import globeop_reports as ops\n", "import pandas as pd\n", @@ -23,8 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "#exclude sell price that are over 200\n", - "df_long = mark.back_test('2013-01-01', '2018-01-01', sell_price_threshold = 200)" + "date = datetime.date.today() - pd.tseries.offsets.MonthEnd(1)" ] }, { @@ -33,6 +32,8 @@ "metadata": {}, "outputs": [], "source": [ + "#exclude sell price that are over 200\n", + "df_long = mark.back_test('2013-01-01', '2018-12-01', sell_price_threshold = 200)\n", "df_long = df_long[df_long.source != 'PB']" ] }, @@ -95,7 +96,6 @@ "metadata": {}, "outputs": [], "source": [ - "%matplotlib inline\n", "mark.count_sources(df)" ] }, @@ -208,14 +208,15 @@ "outputs": [], "source": [ "#Portfolio MTM Gains/Loss/Net\n", - "df_pnl = ops.get_monthly_pnl()[:date]\n", + "df_pnl = ops.get_monthly_pnl()[:date][['mtdbookunrealmtm', 'mtdbookrealmtm']].sum(axis=1)\n", + "df_pnl.name = 'mtm'\n", "r=[]\n", "for d, g in df_pnl.reset_index('identifier').groupby(pd.Grouper(freq='M')):\n", " sql_string = \"SELECT * FROM risk_positions(%s, 'Subprime') WHERE notional > 0\"\n", " pos = pd.read_sql_query(sql_string, engine, params=[g.index[-1].date()])\n", " pos.identifier = pos.identifier.str[:9]\n", - " pos = pos.merge(df_pnl.groupby('identifier').cumsum().loc[g.index[-1]],\n", - " on='identifier')['mtdtotalbookpl'] / nav.loc[d]\n", + " pos = pos.join(df_pnl.groupby('identifier').cumsum().loc[g.index[-1]],\n", + " on='identifier')['mtm'] / nav.loc[d]\n", " r.append([g.index[-1], pos[pos>=0].sum(), pos[pos<0].sum()])\n", "summary = pd.DataFrame.from_records(r, index='date', columns=['date','gains','loss'])\n", "summary['Net'] = summary.gains + summary.loss\n", |
