aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--python/notebooks/Reto Report.ipynb67
-rw-r--r--python/notebooks/risk_sabo.ipynb129
2 files changed, 136 insertions, 60 deletions
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 75156c99..1d0cc038 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -13,16 +13,10 @@
"import numpy as np\n",
"\n",
"from pandas.tseries.offsets import BDay, MonthEnd\n",
- "from analytics.index_data import get_index_quotes\n",
"from analytics.scenarios import run_portfolio_scenarios\n",
- "from analytics.utils import run_local\n",
- "from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, DualCorrTranche\n",
- "from copy import deepcopy\n",
- "from analytics.curve_trades import on_the_run\n",
"from risk.bonds import subprime_risk\n",
"from utils.db import dbconn, dbengine, serenitas_engine, dawn_engine\n",
"from risk.portfolio import build_portfolio, generate_vol_surface\n",
- "from analytics.tranche_basket import DualCorrTranche, TrancheBasket, MarkitTrancheBasket, Skew\n",
"from analytics.basket_index import BasketIndex"
]
},
@@ -46,9 +40,10 @@
"outputs": [],
"source": [
"#Stress scenario for weekly report --> copy paste results to Excel\n",
+ "fund = 'SERCGMAST'\n",
"spread_shock = np.array([100., 200.])\n",
"spread_shock /= analytics._ontr['HY'].spread\n",
- "portf, _ = build_portfolio(position_date, spread_date)\n",
+ "portf, _ = build_portfolio(position_date, spread_date, fund)\n",
"vol_surface = generate_vol_surface(portf, 5)\n",
"\n",
"portf.reset_pv()\n",
@@ -72,7 +67,7 @@
"outputs": [],
"source": [
"################################### JTD\n",
- "_, portf = build_portf(position_date, spread_date)\n",
+ "_, portf = build_portfolio(position_date, spread_date)\n",
"jtd_i = []\n",
"for t in portf.indices:\n",
" bkt = BasketIndex(t.index_type, t.series, [t.tenor])\n",
@@ -87,7 +82,7 @@
"ref_names = pd.read_sql_query(\"select ticker, referenceentity from refentity\", dbconn('serenitasdb'), index_col='ticker')\n",
"jump = pd.concat([pd.concat(jtd_t), pd.concat(jtd_i)])\n",
"jump = jump.merge(ref_names, left_index=True, right_index=True)\n",
- "jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)"
+ "jump = jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)"
]
},
{
@@ -109,7 +104,7 @@
" vol_surface=vol_surface)\n",
"\n",
"pnl = scens.xs('pnl', axis=1, level=2)\n",
- "pnl = pnl.xs((vol_shock, corr_shock), level=['vol_shock', 'corr_shock'])\n",
+ "pnl = pnl.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
"\n",
"scenarios = (pnl.\n",
" reset_index(level=['date'], drop=True).\n",
@@ -189,7 +184,8 @@
"df_1['paydown'] = df_1.apply(lambda df: df.endqty/df.principal_bal * df.principal, axis=1)\n",
"paydowns = df_1.paydown.groupby(pd.Grouper(freq='M')).sum()\n",
"temp = pd.concat([paydowns, df.principal_payment, df.accrued_payment], axis=1).fillna(0)\n",
- "turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()"
+ "turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()\n",
+ "turnover"
]
},
{
@@ -407,27 +403,6 @@
"metadata": {},
"outputs": [],
"source": [
- "#Historical max widening\n",
- "df = get_index_quotes('HY', list(range(on_the_run('HY', spread_date) - 10, on_the_run('HY', spread_date) + 1)),\n",
- " tenor=['5yr'], years=5)\n",
- "df = df.xs('5yr', level='tenor')['close_spread'].groupby(['date', 'series']).last()\n",
- "df=df.loc[:'2020-2-28']\n",
- "\n",
- "widen, tighten = [], []\n",
- "#approximately 1,3,6 months move (22 each months)\n",
- "for days in [22, 66, 132]: \n",
- " calc = df.unstack().pct_change(freq= str(days)+'B').stack().groupby('date').last()\n",
- " widen.append(calc.max())\n",
- " tighten.append(calc.min())\n",
- "pd.DataFrame([widen, tighten], columns=['1M', '3M', '6M'], index=['widen', 'tighten'])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
    "################################## Historical Notionals and HY Equiv\n",
"dates = pd.date_range(datetime.date(2013, 1, 30), datetime.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
"#look for a day with HY quotes... we need that to construct HY Equiv\n",
@@ -442,34 +417,6 @@
" d = d.date()\n",
" portfs[d] = build_portf(d)"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
diff --git a/python/notebooks/risk_sabo.ipynb b/python/notebooks/risk_sabo.ipynb
new file mode 100644
index 00000000..dd2cb9f1
--- /dev/null
+++ b/python/notebooks/risk_sabo.ipynb
@@ -0,0 +1,129 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import datetime\n",
+ "import globeop_reports as go\n",
+ "import pandas as pd\n",
+ "import analytics\n",
+ "import numpy as np\n",
+ "\n",
+ "from pandas.tseries.offsets import BDay, MonthEnd\n",
+ "from analytics.scenarios import run_portfolio_scenarios\n",
+ "from utils.db import dbconn\n",
+ "from risk.portfolio import build_portfolio, generate_vol_surface\n",
+ "from analytics.basket_index import BasketIndex"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "#Set dates\n",
+ "position_date = (datetime.date.today() - MonthEnd(1)).date()\n",
+ "spread_date = position_date\n",
+ "analytics._local = False\n",
+ "analytics.init_ontr(spread_date)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "################################### Run Credit Spread scenarios\n",
+ "spread_shock = np.array([-100., -25., 1., +25. , 100.])\n",
+ "spread_shock /= analytics._ontr['HY'].spread\n",
+ "portf, _ = build_portfolio(position_date, spread_date)\n",
+ "vol_surface = generate_vol_surface(portf, 5)\n",
+ "portf.reset_pv()\n",
+ "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl'],\n",
+ " spread_shock=spread_shock,\n",
+ " vol_shock=[0.0],\n",
+ " corr_shock=[0.0],\n",
+ " vol_surface=vol_surface)\n",
+ "\n",
+ "pnl = scens.xs('pnl', axis=1, level=2)\n",
+ "pnl = pnl.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
+ "\n",
+ "scenarios = (pnl.\n",
+ " reset_index(level=['date'], drop=True).\n",
+ " groupby(level=0, axis=1).sum())\n",
+ "\n",
+ "options = ['HYOPTDEL', 'HYPAYER', 'HYREC', 'IGOPTDEL', 'IGPAYER', 'IGREC']\n",
+ "tranches = ['HYMEZ', 'HYINX', 'HYEQY', 'IGMEZ', 'IGINX', 'IGEQY', 'IGSNR', 'IGINX', 'BSPK', 'XOMEZ', 'XOINX', 'EUMEZ']\n",
+ "hedges = ['HEDGE_CLO', 'HEDGE_MAC', 'HEDGE_MBS']\n",
+ "\n",
+ "synthetic =pd.DataFrame()\n",
+ "synthetic['options'] = scenarios[set(scenarios.columns).intersection(options)].sum(axis=1)\n",
+ "synthetic['tranches'] = scenarios[set(scenarios.columns).intersection(tranches)].sum(axis=1)\n",
+ "synthetic['curve_trades'] = scenarios['curve_trades']\n",
+ "synthetic['total'] = synthetic.sum(axis = 1)\n",
+ "nav = go.get_net_navs()\n",
+ "scenarios.sum(axis=1)\n",
+ "scenarios.sum(axis=1).to_clipboard()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "################################### JTD\n",
+ "_, portf = build_portfolio(position_date, spread_date)\n",
+ "jtd_i = []\n",
+ "for t in portf.indices:\n",
+ " bkt = BasketIndex(t.index_type, t.series, [t.tenor])\n",
+ " spreads = pd.DataFrame(bkt.spreads() * 10000, index=pd.Index(bkt.tickers, name='ticker'), columns=['spread'])\n",
+ " jump = pd.merge(spreads, bkt.jump_to_default() * t.notional, left_index=True, right_index=True)\n",
+ " jtd_i.append(jump.rename(columns={jump.columns[1]: 'jtd'}))\n",
+ "jtd_t = []\n",
+ "for t in portf.tranches:\n",
+ " jump = pd.concat([t.singlename_spreads().reset_index(['seniority', 'doc_clause'], drop=True), t.jump_to_default().rename('jtd')], axis=1)\n",
+ " jtd_t.append(jump.drop(['weight', 'recovery'], axis=1))\n",
+ "\n",
+ "ref_names = pd.read_sql_query(\"select ticker, referenceentity from refentity\", dbconn('serenitasdb'), index_col='ticker')\n",
+ "jump = pd.concat([pd.concat(jtd_t), pd.concat(jtd_i)])\n",
+ "jump = jump.merge(ref_names, left_index=True, right_index=True)\n",
+ "jump = jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)\n",
+ "jump.to_clipboard()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3.8.1 64-bit",
+ "language": "python",
+ "name": "python38164bitc40c8740e5d542d7959acb14be96f4f3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.5"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}