Diffstat (limited to 'python')
-rw-r--r--  python/notebooks/Allocation Reports.ipynb                        |   4
-rw-r--r--  python/notebooks/Asset allocation.ipynb                          |   4
-rw-r--r--  python/notebooks/Beta & Compression.ipynb                        |  68
-rw-r--r--  python/notebooks/Marketing Strategy.ipynb                        |   4
-rw-r--r--  python/notebooks/VaR.ipynb                                       |  53
-rw-r--r--  python/notebooks/bespokes/Phoenix.ipynb                          |   4
-rw-r--r--  python/notebooks/clo_loan_markets.ipynb                          |  10
-rw-r--r--  python/notebooks/swaption_quotes.ipynb                           |   4
-rw-r--r--  python/notebooks/swaption_scenarios.ipynb                        | 225
-rw-r--r--  python/notebooks/tranche and swaption portfolio strategy.ipynb   |  34
10 files changed, 225 insertions, 185 deletions
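
Beta & Compression.ipynb (diffed below) adds the XO index to the beta run and starts computing exponentially weighted covariances of the index returns (returns.ewm(span=180).cov()). A minimal, self-contained sketch of that kind of EWM beta follows; the actual beta_trade.calc_betas implementation is not part of this diff, so the covariance-over-variance formula and the synthetic HY/XO return series here are assumptions:

import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
dates = pd.bdate_range("2019-01-01", periods=250)
# Synthetic daily returns standing in for the HY and XO index quotes.
hy = pd.Series(rng.normal(0, 0.010, len(dates)), index=dates, name="HY")
xo = pd.Series(0.8 * hy + rng.normal(0, 0.005, len(dates)), index=dates, name="XO")

def ewm_beta(dependent, benchmark, span):
    # Rolling EWM beta: EWM covariance divided by EWM variance of the benchmark.
    return dependent.ewm(span=span).cov(benchmark) / benchmark.ewm(span=span).var()

betas = pd.concat({span: ewm_beta(xo, hy, span) for span in (20, 45, 90, 180)}, axis=1)
print(betas.iloc[-1])  # latest beta per span, analogous to df.iloc[-1] in the notebook
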
diff --git a/python/notebooks/Allocation Reports.ipynb b/python/notebooks/Allocation Reports.ipynb index 69e6e620..515a2f65 100644 --- a/python/notebooks/Allocation Reports.ipynb +++ b/python/notebooks/Allocation Reports.ipynb @@ -13,7 +13,7 @@ "import matplotlib.pyplot as plt\n", "import numpy as np\n", "\n", - "from db import dbengine\n", + "from utils.db import dbengine\n", "from yieldcurve import YC\n", "from quantlib.termstructures.yield_term_structure import YieldTermStructure\n", "\n", @@ -213,7 +213,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.3" + "version": "3.8.0" } }, "nbformat": 4, diff --git a/python/notebooks/Asset allocation.ipynb b/python/notebooks/Asset allocation.ipynb index ac122400..747239cc 100644 --- a/python/notebooks/Asset allocation.ipynb +++ b/python/notebooks/Asset allocation.ipynb @@ -214,9 +214,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.8.0" } }, "nbformat": 4, - "nbformat_minor": 0 + "nbformat_minor": 4 } diff --git a/python/notebooks/Beta & Compression.ipynb b/python/notebooks/Beta & Compression.ipynb index 8ddae861..e7874f4e 100644 --- a/python/notebooks/Beta & Compression.ipynb +++ b/python/notebooks/Beta & Compression.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "betas = beta_trade.calc_betas(spans=[20,45,90,180], index_list = ['HY','IG','EU'])" + "betas = beta_trade.calc_betas(spans=[20,45,90,180], index_list = ['HY','IG','EU', 'XO'])" ] }, { @@ -50,6 +50,8 @@ "source": [ "# HY -- EU\n", "df = betas.xs('EU', level='index')\n", + "returns = beta_trade.calc_returns(index_list = ['HY','IG','EU'])\n", + "returns.ewm(span=180).cov().groupby(level='date').nth(-1)\n", "df = df.xs('HY', level='index', axis = 1)\n", "plt.plot(df)\n", "plt.xlabel('date')\n", @@ -80,6 +82,24 @@ "metadata": {}, "outputs": [], "source": [ + "# HY -- XO\n", + "df = betas.xs('XO', level='index')\n", + "returns = beta_trade.calc_returns(index_list = ['HY','IG','XO'])\n", + "returns.ewm(span=180).cov().groupby(level='date').nth(-1)\n", + "df = df.xs('HY', level='index', axis = 1)\n", + "plt.plot(df)\n", + "plt.xlabel('date')\n", + "plt.ylabel('beta')\n", + "plt.legend()\n", + "df.iloc[-1]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ "#---------------------------------------------------------------------------------------------------------\n", "#now look at spread rato\n", "spreads = beta_trade.spreads_ratio()\n", @@ -122,6 +142,48 @@ "metadata": {}, "outputs": [], "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -140,9 +202,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.4" + "version": "3.8.0" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git 
a/python/notebooks/Marketing Strategy.ipynb b/python/notebooks/Marketing Strategy.ipynb index 7adbb4da..6339f221 100644 --- a/python/notebooks/Marketing Strategy.ipynb +++ b/python/notebooks/Marketing Strategy.ipynb @@ -254,9 +254,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.1" + "version": "3.8.0" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/python/notebooks/VaR.ipynb b/python/notebooks/VaR.ipynb index ba59d717..abb580d4 100644 --- a/python/notebooks/VaR.ipynb +++ b/python/notebooks/VaR.ipynb @@ -10,8 +10,9 @@ "from analytics.index_data import get_index_quotes\n", "from analytics.scenarios import run_portfolio_scenarios\n", "from analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface, DualCorrTranche\n", - "from db import dbconn, dbengine\n", + "from utils.db import dbconn, dbengine\n", "\n", + "import analytics\n", "import datetime\n", "import exploration.VaR as var\n", "import pandas as pd\n", @@ -19,8 +20,10 @@ "import globeop_reports as go\n", "\n", "conn = dbconn('dawndb')\n", + "conn.autocommit = True\n", "dawndb = dbengine('dawndb')\n", - "serenitasdb = dbengine('serenitasdb')" + "serenitasdb = dbengine('serenitasdb')\n", + "analytics.init_ontr()" ] }, { @@ -128,8 +131,8 @@ "metadata": {}, "outputs": [], "source": [ - "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n", - "shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n", + "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(3)).date()\n", + "shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(3)).date()\n", "(position_date, shock_date)" ] }, @@ -140,47 +143,43 @@ "outputs": [], "source": [ "#Current tranche and swaptions positions\n", - "t_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n", + "t_sql_string = (\"SELECT id, folder, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n", " \"OVER (partition by security_id, attach) AS ntl_agg \"\n", " \"FROM cds WHERE swap_type='CD_INDEX_TRANCHE' AND termination_cp IS NULL \"\n", " \"AND trade_date <= %s\")\n", - "swaption_sql_string = (\"select id, security_desc from swaptions where date(expiration_date) \"\n", + "swaption_sql_string = (\"select id, security_desc, folder from swaptions where date(expiration_date) \"\n", " \"> %s and swap_type = 'CD_INDEX_OPTION' \"\n", " \"AND trade_date <= %s AND termination_date iS NULL\")\n", - "index_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n", + "index_sql_string = (\"SELECT id, folder, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n", " \"OVER (partition by security_id, attach) AS ntl_agg \"\n", " \"FROM cds WHERE swap_type='CD_INDEX' AND termination_cp IS null \"\n", - " \"AND folder = 'IGOPTDEL' OR folder = 'HYOPTDEL' \"\n", " \"AND trade_date <= %s\")\n", "with conn.cursor() as c:\n", - " #Get Tranche Trade Ids\n", " c.execute(t_sql_string, (position_date,))\n", - " t_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n", - " #Get Swaption Trade Ids\n", + " t_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n", " c.execute(swaption_sql_string, (position_date, position_date))\n", " swaption_trades = c.fetchall()\n", - " #Get Index/deltas Trade Ids\n", " c.execute(index_sql_string, (position_date,))\n", - " index_trade_ids = [dealid for dealid, ntl in c if 
ntl != 0]\n", + " index_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n", " \n", - "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n", - " ['trn_'+ str(a) for a in t_trade_ids])\n", - "for row in swaption_trades:\n", - " option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n", - " option_delta.mark()\n", - " portf.add_trade(BlackSwaption.from_tradeid(row[0], option_delta), 'opt_' + str(row[0]))\n", - "for index_id in index_trade_ids:\n", - " portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n", + "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid, _ in t_trades],\n", + " [trade_id for _, trade_id in t_trades])\n", + "for trade_id, desc, strat in swaption_trades:\n", + " portf.add_trade(BlackSwaption.from_tradeid(trade_id), str(strat) + \"_\" + str(trade_id))\n", + "for trade_id, name in index_trades:\n", + " portf.add_trade(CreditIndex.from_tradeid(trade_id), name)\n", " \n", "#get bond risks:\n", "rmbs_pos = go.rmbs_pos(position_date)\n", + "clo_pos = go.clo_pos(position_date)\n", "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n", " shock_date)\n", "duration, = next(r)\n", "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n", - "notional\n", - "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, \n", - " notional = rmbs_pos['hy_equiv'].sum()), 'rmbs_bond')\n", + "notional = rmbs_pos['hy_equiv'].sum() + clo_pos['hy_equiv'].sum()\n", + "temp = CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = notional)\n", + "temp.direction = 'Seller'\n", + "portf.add_trade(temp, 'rmbs_bond')\n", " \n", "portf.value_date = shock_date\n", "portf.mark(interp_method=\"bivariate_linear\")\n", @@ -190,7 +189,7 @@ "for trade in portf.swaptions:\n", " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", " value_date=shock_date, interp_method = \"bivariate_linear\")\n", - " vol_surface[trade.index.index_type + trade.index.series] = vs[vs.list(option_type='payer')[-1]]\n", + " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n", "vol_shock = [0]\n", "corr_shock = [0]\n", "spread_shock = tighten + [0] + widen\n", @@ -281,9 +280,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.1" + "version": "3.8.0" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/python/notebooks/bespokes/Phoenix.ipynb b/python/notebooks/bespokes/Phoenix.ipynb index 3d546576..15e89617 100644 --- a/python/notebooks/bespokes/Phoenix.ipynb +++ b/python/notebooks/bespokes/Phoenix.ipynb @@ -155,9 +155,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.3" + "version": "3.7.4" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/python/notebooks/clo_loan_markets.ipynb b/python/notebooks/clo_loan_markets.ipynb index 9785d612..43e9410e 100644 --- a/python/notebooks/clo_loan_markets.ipynb +++ b/python/notebooks/clo_loan_markets.ipynb @@ -78,7 +78,15 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "#% under 80/90\n", + "percent_under_80 = df[df['mid']<80].groupby(['latestdate']).agg({'mv': 'sum'})/df.groupby(['latestdate']).agg({'mv': 'sum'})\n", + "percent_under_90 = 
df[df['mid']<90].groupby(['latestdate']).agg({'mv': 'sum'})/df.groupby(['latestdate']).agg({'mv': 'sum'})\n", + "#wtd average prices\n", + "df['wtd_avg'] = df['mv'] * df['mid']\n", + "wtd_prices = df.dropna().groupby(['latestdate']).agg({'wtd_avg': 'sum'}).join(df.dropna().groupby(['latestdate']).agg({'mv': 'sum'}))\n", + "wtd_prices['price'] = wtd_prices['wtd_avg']/wtd_prices['mv']\n" + ] }, { "cell_type": "code", diff --git a/python/notebooks/swaption_quotes.ipynb b/python/notebooks/swaption_quotes.ipynb index d1018607..e74499fc 100644 --- a/python/notebooks/swaption_quotes.ipynb +++ b/python/notebooks/swaption_quotes.ipynb @@ -70,9 +70,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.1" + "version": "3.8.0" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } diff --git a/python/notebooks/swaption_scenarios.ipynb b/python/notebooks/swaption_scenarios.ipynb index e632d922..6399d8e4 100644 --- a/python/notebooks/swaption_scenarios.ipynb +++ b/python/notebooks/swaption_scenarios.ipynb @@ -18,6 +18,8 @@ "from scipy.interpolate import SmoothBivariateSpline\n", "from utils.db import dbconn, dbengine\n", "from risk.swaptions import get_swaption_portfolio\n", + "from scipy.optimize import brentq\n", + "from pandas.tseries.offsets import BDay\n", "\n", "conn = dbconn('dawndb')\n", "dawn_engine = dbengine('dawndb')\n", @@ -32,50 +34,45 @@ "metadata": {}, "outputs": [], "source": [ + "############# Current portfolio one day PNL/Delta scenario\n", "portf = get_swaption_portfolio(datetime.date.today() - pd.offsets.BDay(), conn, source_list=['GS'])\n", "\n", - "hedges = pd.read_sql_query(\"SELECT security_id as redcode, maturity, notional, folder FROM list_cds_positions_by_strat(%s) \"\n", + "hedges = pd.read_sql_query(\"SELECT security_desc, notional FROM list_cds_positions_by_strat(%s) \"\n", " \"WHERE folder in ('IGOPTDEL', 'HYOPTDEL')\",\n", " conn, params=(datetime.date.today(),))\n", "\n", "for i, r in hedges.iterrows():\n", - " strategy = r.pop(\"folder\")\n", - " trade_index = CreditIndex(**r, value_date=datetime.date.today() - pd.offsets.BDay())\n", - " trade_index.mark()\n", - " portf.add_trade(trade_index, (strategy, i))\n", + " portf.add_trade(CreditIndex(r['security_desc'].split(\" \")[1],\n", + " r['security_desc'].split(\" \")[3][1:],\n", + " '5yr', value_date=datetime.date.today() - pd.offsets.BDay(),\n", + " notional = r['notional']), ('delta', i))\n", "\n", "vol_surface = {}\n", "for trade in portf.swaptions:\n", - " k = (trade.index.index_type, trade.index.series, trade.option_type)\n", - " if k not in vol_surface:\n", - " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", - " value_date=datetime.date.today(), interp_method = \"bivariate_linear\")\n", - " vol_surface[k] = vs[vs.list('GS', option_type=trade.option_type)[-1]]\n", + " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", + " value_date=datetime.date.today(), interp_method = \"bivariate_linear\")\n", + " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='GS', option_type=trade.option_type)[-1]]\n", "\n", "#Set original_pv as of yesterday's EOD levels, don't reset PV after this time\n", + "portf.mark(interp_method=\"bivariate_linear\", source_list=['GS'])\n", "portf.reset_pv()\n", "\n", "#set ref to today's levels\n", "portf.value_date = datetime.date.today()\n", - "portf.mark(interp_method=\"bivariate_linear\", source_list=['GS'])" - ] - }, - { - 
"cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ + "portf.mark(interp_method=\"bivariate_linear\", source_list=['GS'])\n", + "\n", "spread_shock = np.round(np.arange(-.1, .1, .01), 4)\n", - "scens = run_portfolio_scenarios(portf, [datetime.datetime.now()], params=['pnl', 'hy_equiv', 'sigma'],\n", + "scens = run_portfolio_scenarios(portf, [datetime.datetime.now()], params=['pnl', 'hy_equiv'],\n", " spread_shock=spread_shock,\n", " vol_shock=[0],\n", + " corr_shock=[0],\n", " vol_surface=vol_surface)\n", "pnl = scens.xs('pnl', level = 2, axis=1).sum(axis=1)\n", "hy_equiv = scens.xs('hy_equiv', level = 2, axis=1).sum(axis=1)\n", "\n", "ig = CreditIndex('IG', 32, '5yr')\n", "ig.mark()\n", + "\n", "pnl.index = pnl.index.set_levels((1+pnl.index.get_level_values('spread_shock')) * ig.spread, level = 'spread_shock')\n", "hy_equiv.index = pnl.index" ] @@ -86,16 +83,8 @@ "metadata": {}, "outputs": [], "source": [ - "pnl.reset_index([\"date\", \"vol_shock\"], drop=True).to_frame(\"pnl\").plot()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "hy_equiv.reset_index([\"date\", \"vol_shock\"], drop=True).to_frame(\"hy_equiv\").plot()" + "pnl, hy_equiv\n", + "#plot_trade_scenarios(portf)" ] }, { @@ -104,7 +93,40 @@ "metadata": {}, "outputs": [], "source": [ - "pnl, hy_equiv" + "#breakeven calc\n", + "index = 'HY'\n", + "series = 32\n", + "value_date = datetime.date.today()\n", + "option_delta = CreditIndex(index, series, '5yr')\n", + "#option_delta.spread = 56.5\n", + "option_delta.price = 106.0\n", + "option1 = BlackSwaption(option_delta, datetime.date(2019, 12, 16), 106.5, option_type=\"receiver\") \n", + "option1.sigma = .35\n", + "option1.notional = 100_000_000 \n", + "option_delta.notional = -option1.delta * option1.notional\n", + "portf = Portfolio([option1, option_delta], trade_ids=['opt1', 'delta'])\n", + "portf.value_date = value_date\n", + "portf.reset_pv()\n", + "portf.value_date = value_date + BDay(1)\n", + "orig_ref = portf.ref\n", + "\n", + "def get_pnl(portf, x):\n", + " portf.ref = x\n", + " portf.sigma = float(vs[surface_id](self.T, np.log(self.moneyness)))\n", + " return portf.pnl\n", + "\n", + "if index == 'IG':\n", + " widening = brentq(lambda x: get_pnl(portf, x), portf.ref, portf.ref + 10)\n", + " portf.ref = orig_ref\n", + " tightening = brentq(lambda x: get_pnl(portf, x), portf.ref-10, portf.ref)\n", + " portf.ref = orig_ref\n", + "else: \n", + " widening = brentq(lambda x: get_pnl(portf, x), portf.ref-3, portf.ref)\n", + " portf.ref = orig_ref\n", + " tightening = brentq(lambda x: get_pnl(portf, x), portf.ref, portf.ref + 3)\n", + " portf.ref = orig_ref\n", + "\n", + "tightening, orig_ref, widening" ] }, { @@ -117,16 +139,16 @@ "index = 'IG'\n", "series = 32\n", "option_delta = CreditIndex(index, series, '5yr') \n", - "option_delta.spread = 60\n", - "option1 = BlackSwaption(option_delta, datetime.date(2019, 9, 17), 90, option_type=\"payer\") \n", - "option2 = BlackSwaption(option_delta, datetime.date(2019, 11, 19), 90, option_type=\"payer\") \n", - "option1.sigma = .6\n", - "option2.sigma = .58\n", + "option_delta.spread = 55\n", + "option1 = BlackSwaption(option_delta, datetime.date(2019, 10, 16), 50, option_type=\"receiver\") \n", + "option2 = BlackSwaption(option_delta, datetime.date(2019, 10, 16), 77.5, option_type=\"payer\") \n", + "option1.sigma = .4\n", + "option2.sigma = .63\n", "option1.notional = 100_000_000 \n", "option2.notional = 100_000_000 \n", 
"option1.direction = 'Long' \n", "option2.direction = 'Short' \n", - "option_delta.notional = option1.delta * option1.notional + option2.delta * option2.notional\n", + "option_delta.notional = -option1.delta * option1.notional - option2.delta * option2.notional\n", "portf = Portfolio([option1, option2, option_delta], trade_ids=['opt1', 'opt2', 'delta'])" ] }, @@ -135,6 +157,13 @@ "execution_count": null, "metadata": {}, "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "date_range = pd.bdate_range(portf.value_date, portf.value_date + pd.tseries.offsets.BDay(30), freq='3B')\n", "vol_shock = np.arange(-.15, .31, 0.01)\n", @@ -143,7 +172,7 @@ "for trade in portf.swaptions:\n", " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", " value_date=portf.value_date, interp_method = \"bivariate_linear\")\n", - " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n", + " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='GS', option_type=trade.option_type)[-1]]\n", "\n", "df = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n", " spread_shock = spread_shock,\n", @@ -171,7 +200,7 @@ " for trade in portf.swaptions:\n", " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", " value_date=portf.value_date, interp_method = \"bivariate_linear\")\n", - " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n", + " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(option_type=trade.option_type)[-1]]\n", " \n", " df = run_portfolio_scenarios(portf, date_range, params=[\"pnl\",\"delta\"],\n", " spread_shock = spread_shock,\n", @@ -188,11 +217,16 @@ " else:\n", " df['spread'] = portf.indices[0].spread * (1 + df.spread_shock)\n", " df = df.set_index(['date', 'spread', 'vol_shock'])\n", - " sort_order = [True, True]\n", - " \n", + " sort_order = [True, False]\n", + " \n", + " #If the multilevels index contains strategy drop it\n", + " if df.columns.nlevels == 3: \n", + " df.columns = df.columns.droplevel(level=0)\n", " pnl = df.xs('pnl', axis=1, level=1).sum(axis=1)\n", " for trade_id, t in portf.items():\n", " if isinstance(t, BlackSwaption):\n", + " if len(trade_id) == 2:\n", + " trade_id = trade_id[1]\n", " df[(trade_id, 'delta')] *= -t.notional \n", " delta = df.xs('delta', axis=1, level=1).sum(axis=1).xs(0, level='vol_shock')\n", " delta += sum([x.notional * -1 if x.direction == 'Buyer' else 1 for x in portf.indices])\n", @@ -223,7 +257,7 @@ " for trade in portf.swaptions:\n", " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", " value_date=portf.value_date, interp_method = \"bivariate_linear\")\n", - " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n", + " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(option_type=trade.option_type)[-1]]\n", "\n", " df = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n", " spread_shock = spread_shock,\n", @@ -239,16 +273,6 @@ "metadata": {}, "outputs": [], "source": [ - "plot_trade_scenarios(portf)\n", - "plot_trade_scenarios(portf, -.15, .5, vol_time_roll=False)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ "#Dec Jan 2017 Trade\n", "option_delta = 
CreditIndex.from_tradeid(864)\n", "option1 = BlackSwaption.from_tradeid(3, option_delta)\n", @@ -336,100 +360,21 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "#Set up Portfolio\n", - "from risk.swaptions import get_swaption_portfolio\n", - "from risk.tranches import get_tranche_portfolio\n", - "rundate = datetime.date(2019,6,21)\n", - "\n", - "portf = get_swaption_portfolio(rundate, conn)\n", - "\n", - "#index positions\n", - "df = pd.read_sql_query(\"SELECT * from list_cds_positions_by_strat(%s)\",\n", - " dawn_engine, params=(rundate,))\n", - "df = df[df.folder.str.contains(\"OPT\")]\n", - "for t in df.itertuples(index=False):\n", - " portf.add_trade(CreditIndex(redcode=t.security_id, maturity=t.maturity, notional=t.notional),\n", - " (t.folder, t.security_desc))\n", - "\n", - "portf.value_date = rundate\n", - "portf.mark(interp_method=\"bivariate_linear\")\n", - "portf.reset_pv()\n", - "\n", - "#------------------------Calc Scenarios\n", - "vol_surface = {}\n", - "for trade in portf.swaptions:\n", - " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", - " value_date=rundate, interp_method = \"bivariate_linear\")\n", - " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n", - "vol_shock = [0]\n", - "corr_shock = [0]\n", - "spread_shock = np.round(np.arange(-.2, 1, .05), 3)\n", - "scens = run_portfolio_scenarios(portf, [pd.Timestamp(rundate)], params=['pnl', 'delta'],\n", - " spread_shock=spread_shock,\n", - " vol_shock=vol_shock,\n", - " corr_shock=[0],\n", - " vol_surface=vol_surface)\n", - "\n", - "pnl = scens.xs('pnl', axis=1, level=2)\n", - "pnl = pnl.xs(0, level='vol_shock')\n", - "\n", - "scenarios = (pnl.\n", - " reset_index(level=['date'], drop=True).\n", - " groupby(level=0, axis=1).sum())\n", - "\n", - "options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']\n", - "scenarios['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)" - ] + "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], - "source": [ - "#Now say that it widens by X percentage, then rebalance, then do the shock again\n", - "rundate = datetime.date(2019,6,21)\n", - "x = 1.2\n", - "for t in portf.swaptions:\n", - " t.index.spread *= x\n", - " vs = vol_surface[(t.index.index_type, t.index.series)]\n", - " t.sigma = max(0.2, float(vs(t.T, math.log(t.moneyness))))\n", - "for t in portf.indices:\n", - " t.spread *= x\n", - "pnl = portf.pnl\n", - "\n", - "analytics.init_ontr(value_date=rundate)\n", - "rebal = analytics._ontr()\n", - "rebal.notional = portf.hy_equiv\n", - "rebal.direction = 'Seller'\n", - "\n", - "rebal.spread *= x\n", - "portf.add_trade(rebal, ('rebalance', 'HYOPTDEL'))\n", - "portf.reset_pv()\n", - "\n", - "swaptions_scens = portf.swaptions[0].shock(params=['pnl', 'pv'],\n", - " spread_shock=spread_shock,\n", - " vol_shock=vol_shock,\n", - " vol_surface=vol_surface)\n", - "\n", - "#------------------------Calc Scenarios\n", - "scens = run_portfolio_scenarios(portf, [pd.Timestamp(rundate)], params=['pnl', 'pv'],\n", - " spread_shock=spread_shock,\n", - " vol_shock=vol_shock,\n", - " corr_shock=[0],\n", - " vol_surface=vol_surface)\n", - "\n", - "pnl = scens.xs('pnl', axis=1, level=2)\n", - "pnl = pnl.xs(0, level='vol_shock')\n", - "\n", - "scenarios = (pnl.\n", - " reset_index(level=['date'], drop=True).\n", - " groupby(level=0, axis=1).sum())\n", - "\n", - "options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 
'IGPAYER', 'IGREC', 'rebalance']\n", - "scenarios['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)" - ] + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/python/notebooks/tranche and swaption portfolio strategy.ipynb b/python/notebooks/tranche and swaption portfolio strategy.ipynb index c0355fb0..e3f5ea1f 100644 --- a/python/notebooks/tranche and swaption portfolio strategy.ipynb +++ b/python/notebooks/tranche and swaption portfolio strategy.ipynb @@ -14,7 +14,7 @@ "from analytics.scenarios import run_tranche_scenarios, run_portfolio_scenarios, run_tranche_scenarios_rolldown\n", "from analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface\n", "from analytics import DualCorrTranche\n", - "from db import dbconn\n", + "from utils.db import dbconn\n", "from datetime import date\n", "from graphics import plot_color_map\n", "\n", @@ -475,7 +475,33 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "#Selwood strategy - March 2019, long 20bn tranches, short 12 bn options\n", + "index = 'EU'\n", + "series = 30\n", + "ss = DualCorrTranche(index, series, '5yr', attach=12, detach=100, corr_attach=.53, \n", + " corr_detach=.99, tranche_running=100, notional=-20000000000)\n", + "index_1 = 'IG'\n", + "series_1 = 32\n", + "option_delta = CreditIndex(index_1, series_1, '5yr') \n", + "option_delta.spread = 66\n", + "option1 = BlackSwaption(option_delta, datetime.date(2019, 6, 19), 120, option_type=\"payer\") \n", + "option1.sigma = .7\n", + "option1.notional = 12_000_000_000 \n", + "option1.direction = 'Long' \n", + "portf = Portfolio([ss, option1], trade_ids=['ss', 'opt1'])\n", + "portf.reset_pv()\n", + "spread_shock = np.round(np.arange(-.2, 1, .025), 3)\n", + "scens = run_portfolio_scenarios(portf, date_range, params=['pnl', 'delta'],\n", + " spread_shock=spread_shock,\n", + " vol_shock=vol_shock,\n", + " corr_shock=[0],\n", + " vol_surface=vol_surface)\n", + "\n", + "scens = scens.xs((0,0), level=['vol_shock', 'corr_shock'])\n", + "pnl = scens.xs('pnl', axis=1, level=1)\n", + "delta = scens.xs('delta', axis=1, level=1)\n" + ] } ], "metadata": { @@ -494,9 +520,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.1" + "version": "3.8.0" } }, "nbformat": 4, - "nbformat_minor": 2 + "nbformat_minor": 4 } |
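
clo_loan_markets.ipynb fills a previously empty cell with two loan-market summaries: the share of market value quoted below 80 and below 90, and a market-value-weighted average price per date. The sketch below reproduces those calculations on a synthetic frame; only the column names (latestdate, mid, mv) are taken from the diff, the data itself is made up:

import numpy as np
import pandas as pd

rng = np.random.default_rng(1)
dates = pd.bdate_range("2019-06-03", periods=5)
df = pd.DataFrame({
    "latestdate": np.repeat(dates, 40),
    "mid": rng.normal(95, 8, 200).clip(40, 103),  # quoted mid price
    "mv": rng.uniform(1e6, 5e7, 200),             # position market value
})

total_mv = df.groupby("latestdate")["mv"].sum()
percent_under_80 = df.loc[df["mid"] < 80].groupby("latestdate")["mv"].sum() / total_mv
percent_under_90 = df.loc[df["mid"] < 90].groupby("latestdate")["mv"].sum() / total_mv

# MV-weighted average price per date: sum(mv * mid) / sum(mv)
wtd_prices = df.assign(wtd=df["mv"] * df["mid"]).groupby("latestdate")[["wtd", "mv"]].sum()
wtd_prices["price"] = wtd_prices["wtd"] / wtd_prices["mv"]
print(percent_under_80, percent_under_90, wtd_prices["price"], sep="\n")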

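swaption_scenarios.ipynb also gains a breakeven cell that uses scipy.optimize.brentq to find the spread (or price) moves at which the option-plus-delta package's PnL crosses zero, bracketing on either side of the current reference (roughly ±10 for IG spreads and ±3 for HY prices in the diff). The sketch below shows only that root-finding pattern; the quadratic PnL curve and the ±50bp bracket are hypothetical stand-ins for the analytics Portfolio object, which cannot be reproduced here:

from scipy.optimize import brentq

ref = 330.0  # hypothetical HY reference spread, in bp

def pnl(spread):
    # Toy PnL profile: positive at the current ref, decaying as spreads move away.
    return 250_000 - 1_500 * (spread - ref) ** 2

# Bracket each side of the reference so brentq has a sign change to work with.
widening = brentq(pnl, ref, ref + 50)    # breakeven if spreads widen
tightening = brentq(pnl, ref - 50, ref)  # breakeven if spreads tighten
print(tightening, ref, widening)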