aboutsummaryrefslogtreecommitdiffstats
path: root/python/notebooks
diff options
context:
space:
mode:
Diffstat (limited to 'python/notebooks')
-rw-r--r--python/notebooks/Option Trades.ipynb153
-rw-r--r--python/notebooks/Valuation Backtest.ipynb15
2 files changed, 114 insertions, 54 deletions
diff --git a/python/notebooks/Option Trades.ipynb b/python/notebooks/Option Trades.ipynb
index 53ceb04c..d43d86ee 100644
--- a/python/notebooks/Option Trades.ipynb
+++ b/python/notebooks/Option Trades.ipynb
@@ -10,9 +10,9 @@
"import pandas as pd\n",
"import numpy as np\n",
"\n",
- "from graphics import plot_time_color_map, plot_color_map\n",
- "from analytics import Swaption, BlackSwaption, BlackSwaptionVolSurface, Index, Portfolio\n",
- "from analytics.scenarios import run_swaption_scenarios, run_index_scenarios, run_portfolio_scenarios\n",
+ "from graphics import plot_color_map\n",
+ "from analytics import Swaption, BlackSwaption, BlackSwaptionVolSurface, CreditIndex, Portfolio\n",
+ "from analytics.scenarios import run_swaption_scenarios, run_index_scenarios, run_portfolio_scenarios, run_portfolio_scenarios_module\n",
"from scipy.interpolate import SmoothBivariateSpline\n",
"from db import dbengine"
]
@@ -32,29 +32,48 @@
"metadata": {},
"outputs": [],
"source": [
- "def plot_trade_scenarios(portf, shock_min=-.15, shock_max=.2, period=-1, vol_time_roll=True):\n",
+ "def plot_trade_scenarios(portf, shock_min=-.15, shock_max=.2, vol_time_roll=True):\n",
" portf.reset_pv()\n",
- " earliest_date = min(portf.swaptions, key=lambda x: x.exercise_date).exercise_date\n",
- " date_range = pd.bdate_range(portf.indices[0].value_date,\n",
- " earliest_date - pd.tseries.offsets.BDay(), freq='3B')\n",
- " vol_shock = np.arange(-0.15, 0.3, 0.01)\n",
+ " end_date = min(portf.swaptions, key=lambda x: x.exercise_date).exercise_date\n",
+ " date_range = pd.bdate_range(portf.value_date,\n",
+ " end_date - pd.tseries.offsets.BDay(), freq='3B')\n",
+ " vol_shock = np.arange(-.15, .31, 0.01)\n",
" spread_shock = np.arange(shock_min, shock_max, 0.01)\n",
- " index = portf.indices[0].name.split()[1]\n",
- " series = portf.indices[0].name.split()[3][1:]\n",
- " vs = BlackSwaptionVolSurface(index, series, value_date=portf.indices[0].value_date)\n",
+ " index = portf.indices[0].index_type\n",
+ " vs = BlackSwaptionVolSurface(index, portf.indices[0].series, \n",
+ " value_date=portf.value_date)\n",
" vol_surface = vs[vs.list(option_type='payer')[-1]]\n",
+ " \n",
+ " df = run_portfolio_scenarios(portf, date_range, params=[\"pnl\",\"delta\"],\n",
+ " spread_shock = spread_shock,\n",
+ " vol_shock = vol_shock,\n",
+ " vol_surface = vol_surface)\n",
+ " df = df.reset_index()\n",
+ " df.vol_shock = df.vol_shock.round(2)\n",
"\n",
- " df = run_portfolio_scenarios(portf, date_range, spread_shock, vol_shock, vol_surface,\n",
- " params=[\"pnl\",\"delta\"])\n",
- "\n",
- " hy_plot_range = 100 + (500 - portf.indices[0].spread * (1 + spread_shock)) * \\\n",
- " abs(portf.indices[0].DV01) / portf.indices[0].notional * 100\n",
+ " if index == 'HY':\n",
+ " df['price'] = 100 + (500 - portf.indices[0].spread * (1 + df.spread_shock)) \\\n",
+ " * abs(portf.indices[0].DV01) / portf.indices[0].notional * 100\n",
+ " df = df.set_index(['date', 'price', 'vol_shock'])\n",
+ " sort_order = [True, False]\n",
+ " else:\n",
+ "df['spread'] = portf.indices[0].spread * (1 + df.spread_shock)\n",
+ " df = df.set_index(['date', 'spread', 'vol_shock'])\n",
+ " sort_order = [True, True]\n",
+ " \n",
+ " pnl = df.xs('pnl', axis=1, level=1).sum(axis=1)\n",
+ " for trade_id, t in portf.items():\n",
+ " if isinstance(t, BlackSwaption):\n",
+ " df[(trade_id, 'delta')] *= -t.notional \n",
+ " delta = df.xs('delta', axis=1, level=1).sum(axis=1).xs(0, level='vol_shock')\n",
+ "delta += sum([x.notional * (-1 if x.direction == 'Buyer' else 1) for x in portf.indices])\n",
"\n",
- " shock = hy_plot_range if index == 'HY' else portf.indices[0].spread * (1 + spread_shock)\n",
+ " pnl.name = 'pnl'\n",
+ " delta.name = 'delta'\n",
"\n",
- " plot_time_color_map(df[round(df.vol_shock,2)==0], shock, 'pnl', index=index)\n",
- " plot_time_color_map(df[round(df.vol_shock,2)==.2], shock, 'pnl', index=index)\n",
- " plot_color_map(df.loc[date_range[period]], shock, vol_shock, 'pnl', index=index)"
+ " plot_color_map(pnl.xs(0, level='vol_shock'), sort_order)\n",
+ " plot_color_map(delta, sort_order)\n",
+ " plot_color_map(pnl.loc[date_range[-1]], sort_order)"
]
},
{
@@ -64,16 +83,20 @@
"outputs": [],
"source": [
"#Ad hoc\n",
- "option_delta = Index.from_name('IG', 30, '5yr', value_date=datetime.date(2018, 5, 17))\n",
- "option_delta.spread = 61\n",
- "option1 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 60, option_type=\"payer\")\n",
- "option2 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 80, option_type=\"payer\")\n",
- "option3 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 80, option_type=\"payer\")\n",
- "option1.sigma = .381\n",
- "option2.sigma = .545\n",
+ "index = 'HY'\n",
+ "series = 30\n",
+ "value_date = datetime.date(2018, 6, 7)\n",
+ "option_delta = CreditIndex(index, series, '5yr', value_date)\n",
+ "#option_delta.spread = 66\n",
+ "option_delta.price = 106.75\n",
+ "option1 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 103, option_type=\"payer\")\n",
+ "option2 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 101.5, option_type=\"payer\")\n",
+ "option3 = BlackSwaption(option_delta, datetime.date(2018, 8, 15), 100, option_type=\"payer\")\n",
+ "option1.sigma = .47\n",
+ "option2.sigma = .53\n",
"option3.sigma = .69\n",
- "option1.notional = 100_000_000\n",
- "option2.notional = 300_000_000\n",
+ "option1.notional = 50_000_000\n",
+ "option2.notional = 100_000_000\n",
"option3.notional = 1\n",
"option1.direction = 'Long'\n",
"option2.direction = 'Short'\n",
@@ -82,7 +105,7 @@
"option_delta.notional = option1.notional * option1.delta + option2.notional * option2.delta + option3.notional * option3.delta\n",
"option_delta.direction = 'Seller' if option_delta.notional > 0 else 'Buyer'\n",
"option_delta.notional = abs(option_delta.notional)\n",
- "portf = Portfolio([option1, option2, option3, option_delta])\n",
+ "portf = Portfolio([option1, option2, option3, option_delta], trade_ids=['opt1', 'opt2', 'opt3', 'delta'])\n",
"#Plot Scenarios Inputs: Portfolio, spread shock tightening%, spread shock widening%, snapshot period)\n",
"portf"
]
@@ -93,7 +116,7 @@
"metadata": {},
"outputs": [],
"source": [
- "plot_trade_scenarios(portf, -.15, .8, -4, vol_time_roll=False)"
+ "plot_trade_scenarios(portf, -.15, .8, vol_time_roll=False)"
]
},
{
@@ -103,44 +126,44 @@
"outputs": [],
"source": [
"#Dec Jan 2017 Trade\n",
- "option_delta = Index.from_tradeid(864)\n",
+ "option_delta = CreditIndex.from_tradeid(864)\n",
"option1 = BlackSwaption.from_tradeid(3, option_delta)\n",
"option2 = BlackSwaption.from_tradeid(4, option_delta)\n",
- "portf = Portfolio([option1, option2, option_delta])\n",
+ "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n",
"#plot_trade_scenarios(portf)\n",
"\n",
"#Feb 2017: Sell May Buy April Calendar Trade\n",
- "option_delta = Index.from_tradeid(870)\n",
+ "option_delta = CreditIndex.from_tradeid(870)\n",
"option1 = BlackSwaption.from_tradeid(5, option_delta)\n",
"option2 = BlackSwaption.from_tradeid(6, option_delta)\n",
- "portf = Portfolio([option1, option2, option_delta])\n",
+ "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n",
"#plot_trade_scenarios(portf)\n",
"\n",
"#April 2017: Sell May Buy June Calendar Trade\n",
- "option_delta = Index.from_tradeid(874)\n",
+ "option_delta = CreditIndex.from_tradeid(874)\n",
"option1 = BlackSwaption.from_tradeid(7, option_delta)\n",
"option2 = BlackSwaption.from_tradeid(8, option_delta)\n",
- "portf = Portfolio([option1, option2, option_delta])\n",
+ "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n",
"#plot_trade_scenarios(portf)\n",
"\n",
"#June July 2017 Calendar Trade\n",
- "option_delta_pf = Index.from_tradeid(874)\n",
- "option_delta2_pf = Index.from_tradeid(879)\n",
+ "option_delta_pf = CreditIndex.from_tradeid(874)\n",
+ "option_delta2_pf = CreditIndex.from_tradeid(879)\n",
"\n",
"option1_pf = BlackSwaption.from_tradeid(7, option_delta_pf)\n",
"option2_pf = BlackSwaption.from_tradeid(9, option_delta_pf)\n",
"option_delta_pf.notional = 50_335_169\n",
"\n",
- "portf = Portfolio([option1_pf, option2_pf, option_delta_pf])\n",
+ "portf = Portfolio([option1_pf, option2_pf, option_delta_pf], trade_ids=['opt1', 'opt2', 'delta'])\n",
"portf.value_date = datetime.date(2017, 5, 17)\n",
"portf.mark()\n",
"#plot_trade_scenarios(portf)\n",
"\n",
"#July 2017: Buy Sept HY payer spread\n",
- "option_delta = Index.from_tradeid(891)\n",
+ "option_delta = CreditIndex.from_tradeid(891)\n",
"option1 = BlackSwaption.from_tradeid(10, option_delta)\n",
"option2 = BlackSwaption.from_tradeid(11, option_delta)\n",
- "portf = Portfolio([option1, option2, option_delta])\n",
+ "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n",
"#plot_trade_scenarios(portf)"
]
},
@@ -150,11 +173,16 @@
"metadata": {},
"outputs": [],
"source": [
- "#Look at steepness of volatility - 90 days, .75 vs .25 payer deltas\n",
+ "#Look at steepness of volatility - 30 days, .85 vs .15 payer deltas on HY\n",
+ "days = 30\n",
+ "delta1 = .85\n",
+ "delta2 = .15\n",
+ "index = 'HY'\n",
+ "\n",
"sql_str = \"select b.quotedate, b.ref, b.ref_id, b.expiry, a.delta_pay, a.vol from \" \\\n",
- " \"swaption_quotes a join swaption_ref_quotes b on a.ref_id = b.ref_id and index = 'IG'\"\n",
+ " \"swaption_quotes a join swaption_ref_quotes b on a.ref_id = b.ref_id and index = %s\"\n",
"df = pd.read_sql_query(sql_str, dbengine('serenitasdb'), \n",
- " index_col=['quotedate'], parse_dates={'quotedate': {'utc': True}})\n",
+ " index_col=['quotedate'], parse_dates={'quotedate': {'utc': True}}, params=[index])\n",
"df['days_expiry'] = (df.expiry - df.index.date).dt.days\n",
"r_1 = []\n",
"for i, g in df.groupby(pd.Grouper(freq='D', level='quotedate')):\n",
@@ -164,7 +192,7 @@
" if len(r) > 0:\n",
" temp = np.dstack(r)\n",
" f = SmoothBivariateSpline(temp[0][0], temp[0][1], temp[0][2])\n",
- " r = (f(90, .75) - f(90, .25))[0][0]\n",
+ " r = (f(days, delta1) - f(days, delta2))[0][0]\n",
" r_1.append([i, r])\n",
" else:\n",
" pass\n",
@@ -177,6 +205,37 @@
"execution_count": null,
"metadata": {},
"outputs": [],
+ "source": [
+ "#Current Positions\n",
+ "#option_delta = CreditIndex.from_tradeid(945)\n",
+ "index = 'HY'\n",
+ "series = 30\n",
+ "option_delta = CreditIndex(index, series, '5yr', value_date=datetime.date(2018, 6, 7))\n",
+ "#option_delta.spread = 66\n",
+ "option_delta.price = 106.75\n",
+ "option_delta.notional = 1\n",
+ "option1 = BlackSwaption.from_tradeid(14, option_delta)\n",
+ "option2 = BlackSwaption.from_tradeid(15, option_delta)\n",
+ "#option3 = BlackSwaption.from_tradeid(16, option_delta)\n",
+ "#option4 = BlackSwaption.from_tradeid(17, option_delta)\n",
+ "#portf = Portfolio([option1, option2, option3, option4, option_delta])\n",
+ "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])\n",
+ "portf.mark()\n",
+ "plot_trade_scenarios(portf, shock_min=-.15, shock_max=.4)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
"source": []
}
],
@@ -196,7 +255,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.6.5"
+ "version": "3.6.6"
}
},
"nbformat": 4,
diff --git a/python/notebooks/Valuation Backtest.ipynb b/python/notebooks/Valuation Backtest.ipynb
index 870fb33d..77f709b8 100644
--- a/python/notebooks/Valuation Backtest.ipynb
+++ b/python/notebooks/Valuation Backtest.ipynb
@@ -6,9 +6,9 @@
"metadata": {},
"outputs": [],
"source": [
- "from datetime import datetime\n",
"from db import dbengine\n",
"\n",
+ "import datetime\n",
"import mark_backtest_underpar as mark\n",
"import globeop_reports as ops\n",
"import pandas as pd\n",
@@ -23,8 +23,7 @@
"metadata": {},
"outputs": [],
"source": [
- "#exclude sell price that are over 200\n",
- "df_long = mark.back_test('2013-01-01', '2018-01-01', sell_price_threshold = 200)"
+ "date = datetime.date.today() - pd.tseries.offsets.MonthEnd(1)"
]
},
{
@@ -33,6 +32,8 @@
"metadata": {},
"outputs": [],
"source": [
+ "#exclude sell price that are over 200\n",
+ "df_long = mark.back_test('2013-01-01', '2018-12-01', sell_price_threshold = 200)\n",
"df_long = df_long[df_long.source != 'PB']"
]
},
@@ -95,7 +96,6 @@
"metadata": {},
"outputs": [],
"source": [
- "%matplotlib inline\n",
"mark.count_sources(df)"
]
},
@@ -208,14 +208,15 @@
"outputs": [],
"source": [
"#Portfolio MTM Gains/Loss/Net\n",
- "df_pnl = ops.get_monthly_pnl()[:date]\n",
+ "df_pnl = ops.get_monthly_pnl()[:date][['mtdbookunrealmtm', 'mtdbookrealmtm']].sum(axis=1)\n",
+ "df_pnl.name = 'mtm'\n",
"r=[]\n",
"for d, g in df_pnl.reset_index('identifier').groupby(pd.Grouper(freq='M')):\n",
" sql_string = \"SELECT * FROM risk_positions(%s, 'Subprime') WHERE notional > 0\"\n",
" pos = pd.read_sql_query(sql_string, engine, params=[g.index[-1].date()])\n",
" pos.identifier = pos.identifier.str[:9]\n",
- " pos = pos.merge(df_pnl.groupby('identifier').cumsum().loc[g.index[-1]],\n",
- " on='identifier')['mtdtotalbookpl'] / nav.loc[d]\n",
+ " pos = pos.join(df_pnl.groupby('identifier').cumsum().loc[g.index[-1]],\n",
+ " on='identifier')['mtm'] / nav.loc[d]\n",
" r.append([g.index[-1], pos[pos>=0].sum(), pos[pos<0].sum()])\n",
"summary = pd.DataFrame.from_records(r, index='date', columns=['date','gains','loss'])\n",
"summary['Net'] = summary.gains + summary.loss\n",