aboutsummaryrefslogtreecommitdiffstats
path: root/python/notebooks/VaR.ipynb
diff options
context:
space:
mode:
Diffstat (limited to 'python/notebooks/VaR.ipynb')
-rw-r--r--python/notebooks/VaR.ipynb71
1 file changed, 60 insertions, 11 deletions
diff --git a/python/notebooks/VaR.ipynb b/python/notebooks/VaR.ipynb
index 3bb3e83d..178d72b3 100644
--- a/python/notebooks/VaR.ipynb
+++ b/python/notebooks/VaR.ipynb
@@ -10,13 +10,16 @@
"from analytics.index_data import get_index_quotes\n",
"from analytics.scenarios import run_portfolio_scenarios\n",
"from analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface, DualCorrTranche\n",
- "from db import dbconn\n",
+ "from db import dbconn, dbengine\n",
"\n",
"import datetime\n",
"import exploration.VaR as var\n",
"import pandas as pd\n",
+ "import numpy as np\n",
"\n",
- "conn = dbconn('dawndb')"
+ "conn = dbconn('dawndb')\n",
+ "dawndb = dbengine('dawndb')\n",
+ "serenitasdb = dbengine('serenitasdb')"
]
},
{
@@ -25,7 +28,7 @@
"metadata": {},
"outputs": [],
"source": [
- "date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n",
+ "date = (datetime.date.today() - pd.tseries.offsets.BDay(3)).date()  # NOTE(review): was BDay(1); confirm the 3-business-day lag is intended and not a debugging leftover\n",
"report_date = (date + pd.tseries.offsets.BMonthEnd(-1)).date()\n",
"index_type = \"IG\"\n",
"quantile = .025"
@@ -63,7 +66,8 @@
"outputs": [],
"source": [
"#Mortgage Hedge VaR - use IG spread relative move for VaR\n",
- "df = var.get_pos(date, 'HEDGE_MBS')\n",
+ "df = pd.read_sql_query(\"SELECT * from list_cds_marks_by_strat(%s) where strategy ='HEDGE_MBS'\",\n",
+ " dawndb, params=(date,))\n",
"portf = Portfolio([CreditIndex(row.p_index, row.p_series, row.tenor,\n",
" report_date, -row.notional)\n",
" for row in df[['p_index', 'tenor', 'p_series', 'notional']].\n",
@@ -136,7 +140,6 @@
" \"FROM cds WHERE swap_type='CD_INDEX' AND termination_cp IS null \"\n",
" \"AND folder = 'IGOPTDEL' OR folder = 'HYOPTDEL' \"\n",
" \"AND trade_date <= %s\")\n",
- "conn = dbconn('dawndb')\n",
"with conn.cursor() as c:\n",
" #Get Tranche Trade Ids\n",
" c.execute(t_sql_string, (date,))\n",
@@ -158,14 +161,18 @@
" portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
" \n",
"#Update manually - positive notional = long risk\n",
- "non_trancheSwap_risk_notional = 33763230\n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = date, notional = -non_trancheSwap_risk_notional), 'port')\n",
+ "non_trancheSwap_risk_notional = 49119912 \n",
+ "\n",
+ "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = date, notional = -non_trancheSwap_risk_notional), 'bond')\n",
" \n",
"portf.value_date = date\n",
- "portf.mark()\n",
+ "portf.mark(interp_method=\"bivariate_spline\")\n",
"portf.reset_pv()\n",
" \n",
- "vs = BlackSwaptionVolSurface(portf.swaptions[0].index.index_type, portf.swaptions[0].index.series, value_date=date)\n",
+ "vs = BlackSwaptionVolSurface(portf.swaptions[0].index.index_type, \n",
+ " portf.swaptions[0].index.series, \n",
+ " value_date=date, \n",
+ " interp_method = \"bivariate_spline\")\n",
"vol_surface = vs[vs.list(option_type='payer')[-1]]\n",
"vol_shock = [0]\n",
"corr_shock = [0]\n",
@@ -187,8 +194,50 @@
"metadata": {},
"outputs": [],
"source": [
- "var.cleared_cds_margins(report_date)"
+ "spread_shock = np.arange(-.4, 2.2, .2)\n",
+ "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
+ " spread_shock=spread_shock,\n",
+ " vol_shock=vol_shock,\n",
+ " corr_shock=corr_shock,\n",
+ " vol_surface=vol_surface)\n",
+ "scens.sum(axis=1)\n",
+ "\n",
+ "risk_notional = [t.notional * t._index.duration for t in portf.indices]  # FIXME(review): `.duration` used as attribute here but called as `.duration()` below -- confirm which is correct\n",
+ "portf.trades[0]._index.duration()"
]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Calculate the margins for cleared CDS required for each strategy\n",
+ "df = pd.read_sql_query(\"SELECT * from list_cds_marks_by_strat(%s)\",\n",
+ " dawndb, params=(date,))\n",
+ "percentile = .95 # two-sided 90% monthly case: widen at the 95th %tile, tighten at the 5th\n",
+ "shocks, widen, tighten, onTR_dur, onTR_spread = {}, {}, {}, {}, {}\n",
+ "for ind in ['IG', 'HY', 'EU']:\n",
+ " shocks[ind], onTR_spread[ind], onTR_dur[ind] = rel_spread_diff(date, index=ind)\n",
+ " widen[ind] = shocks[ind].quantile(percentile)\n",
+ " tighten[ind] = shocks[ind].quantile(1-percentile)\n",
+ "\n",
+ "df['onTR_notional'] = df.apply(lambda df:\n",
+ " df.notional * df.factor * df.duration / onTR_dur[df.p_index], axis=1)\n",
+ "df['widen'] = df.apply(lambda df:\n",
+ " df.onTR_notional * onTR_spread[df.p_index] * onTR_dur[df.p_index] * widen[df.p_index]/10000, axis=1)\n",
+ "df['tighten'] = df.apply(lambda df:\n",
+ " df.onTR_notional * onTR_spread[df.p_index] * onTR_dur[df.p_index] * tighten[df.p_index]/10000, axis=1)\n",
+ "delta_alloc = df.groupby('strategy').sum()\n",
+ "delta_alloc['total'] = delta_alloc.apply(lambda df: max(abs(df.widen), abs(df.tighten)), axis=1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
@@ -207,7 +256,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.0"
+ "version": "3.7.1"
}
},
"nbformat": 4,