Diffstat (limited to 'python')
 python/notebooks/Reto Report.ipynb | 33
 1 file changed, 22 insertions(+), 11 deletions(-)
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index c1170b16..12f3f3af 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -18,13 +18,11 @@
"from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio,DualCorrTranche\n",
"from copy import deepcopy\n",
"\n",
- "from utils.db import dbconn, dbengine\n",
- "\n",
"from risk.tranches import get_tranche_portfolio\n",
"from risk.swaptions import get_swaption_portfolio\n",
"from risk.bonds import subprime_risk, clo_risk, crt_risk\n",
"\n",
- "dawn_engine = dbengine('dawndb')"
+ "from utils.db import dbconn, dbengine, serenitas_engine, dawn_engine"
]
},
{
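Note on the import change above: the notebook now pulls dawn_engine and serenitas_engine straight from utils.db instead of building dawn_engine locally with dbengine('dawndb'). The diff does not show utils.db itself, so the following is only a hedged sketch of what such a module might look like (SQLAlchemy engines built once at import time, plus a raw-connection context manager matching the later `with dbconn(...)` usage); the connection URLs and the 'serenitasdb' name are assumptions.

    # Hypothetical sketch of utils.db -- illustrative only, not the real module.
    from contextlib import contextmanager
    from sqlalchemy import create_engine

    def dbengine(name):
        # Map a logical database name to a SQLAlchemy engine (URL is a placeholder).
        return create_engine(f"postgresql:///{name}")

    # Module-level engines, created once and imported by the notebooks.
    dawn_engine = dbengine("dawndb")
    serenitas_engine = dbengine("serenitasdb")

    @contextmanager
    def dbconn(name):
        # Raw DB-API connection for code that works with cursors directly.
        conn = dbengine(name).raw_connection()
        try:
            yield conn
        finally:
            conn.close()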
@@ -208,7 +206,12 @@
"#min/max/mean cds trades by notional\n",
"volume = g.sum()/nav.endbooknav\n",
"volume.fillna(0, inplace=True)\n",
- "volume.min(), volume.max()/4, volume.mean()/4"
+ "volume.min(), volume.max()/4, volume.mean()/4\n",
+ "\n",
+ "#Max trades per day - CDS trades only, bond trades only, combined bond/cds trades\n",
+ "cds_trades[cds_trades.cp_code != 'CONTRA'].groupby(pd.Grouper(freq='D')).count().max()\n",
+ "bond_trades.groupby(pd.Grouper(freq='D')).count().max()\n",
+ "cds_trades[cds_trades.cp_code != 'CONTRA'].id.append(bond_trades.id).groupby(pd.Grouper(freq='D')).count().max()"
]
},
{
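Note on the added max-trades-per-day lines: they count daily activity for CDS trades excluding CONTRA counterparties, for bond trades, and for both combined. A self-contained sketch of the same grouping idea on made-up frames (it uses pd.concat rather than the notebook's Series.append, which newer pandas versions have deprecated):

    # Sketch: daily trade counts via pd.Grouper on a DatetimeIndex (toy data).
    import pandas as pd

    cds_trades = pd.DataFrame(
        {"id": [1, 2, 3], "cp_code": ["GS", "CONTRA", "MS"]},
        index=pd.to_datetime(["2020-01-02", "2020-01-02", "2020-01-03"]),
    )
    bond_trades = pd.DataFrame(
        {"id": [10, 11]},
        index=pd.to_datetime(["2020-01-02", "2020-01-03"]),
    )

    cds_only = cds_trades[cds_trades.cp_code != "CONTRA"]
    max_cds = cds_only.groupby(pd.Grouper(freq="D")).id.count().max()
    max_bond = bond_trades.groupby(pd.Grouper(freq="D")).id.count().max()
    # Combined bond/CDS activity: concatenate the id series, then group by day.
    max_combined = (
        pd.concat([cds_only.id, bond_trades.id])
        .groupby(pd.Grouper(freq="D"))
        .count()
        .max()
    )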
@@ -334,6 +337,13 @@
"source": [
"################################## Historical Notionals and HY Equiv\n",
"dates = pd.date_range(datetime.date(2013, 1, 30), pd.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
+ "#look for a day with HY quotes... we need that to construct HY Equiv\n",
+ "sql_string = 'select distinct(date) from index_quotes where index = %s order by date asc'\n",
+ "hy_dates = pd.read_sql_query(sql_string, serenitas_engine, parse_dates = 'date', params=['HY',])\n",
+ "def nearest(items, pivot):\n",
+ " return min(items, key=lambda x: abs(x - pivot))\n",
+ "hy_dates.apply(lambda x: nearest(dates, x))\n",
+ "pd.merge_asof(pd.DataFrame(dates), hy_dates, left_index=True, right_index=True)\n",
"portfs = {}\n",
"for d in dates:\n",
" d = d.date()\n",
@@ -346,8 +356,9 @@
"metadata": {},
"outputs": [],
"source": [
- "##################################\n",
+ "#### Function to build portfolio\n",
"def build_portf(position_date, spread_date=None):\n",
+ " analytics.init_ontr()\n",
" if spread_date is None:\n",
" spread_date=position_date\n",
" conn = dawn_engine.raw_connection()\n",
@@ -386,7 +397,7 @@
"\n",
" #get bond risks:\n",
" sql_string = (\"SELECT distinct timestamp::date FROM priced where normalization = 'current_notional' and model_version = 1 \"\n",
- " \"and date(timestamp) < %s and date(timestamp) > %s order by timestamp desc\")\n",
+ " \"and date(timestamp) <= %s and date(timestamp) >= %s order by timestamp desc\")\n",
" with dbconn('etdb') as etconn, dbconn('dawndb') as dawnconn:\n",
" timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=[\"timestamp\"], \n",
" params=[position_date, position_date - pd.tseries.offsets.DateOffset(15, \"D\")])\n",
@@ -420,7 +431,7 @@
"################################### Calculate stress scenario \n",
"position_date = (datetime.date.today() - BDay(1)).date()\n",
"spread_date = position_date\n",
- "#analytics.init_ontr(spread_date)"
+ "analytics.init_ontr(spread_date)"
]
},
{
@@ -461,7 +472,7 @@
" except:\n",
" vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series + 1, \n",
" value_date=spread_date, interp_method = \"bivariate_linear\")\n",
- " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='GS', option_type=trade.option_type)[-1]]\n",
+ " vol_surface[(trade.index.index_type, trade.index.series, trade.option_type)] = vs[vs.list(source='MS', option_type=trade.option_type)[-1]]\n",
"\n",
"scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=[\"pnl\"],\n",
" spread_shock=widen,\n",
@@ -492,9 +503,9 @@
"source": [
"################################### Run set of scenarios\n",
"spread_shock = np.round(np.arange(-.2, 1, .05), 3)\n",
- "scens = run_portfolio_scenarios(portf, date_range, params=['pnl', 'delta'],\n",
+ "scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl', 'delta'],\n",
" spread_shock=spread_shock,\n",
- " vol_shock=vol_shock,\n",
+ " vol_shock=[0],\n",
" corr_shock=[0],\n",
" vol_surface=vol_surface)\n",
"\n",
@@ -548,7 +559,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.4"
+ "version": "3.8.0"
}
},
"nbformat": 4,