{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import datetime\n", "import pandas.tseries.offsets as off\n", "import globeop_reports as go\n", "import pandas as pd\n", "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import exploration.VaR as var\n", "import analytics\n", "\n", "from analytics.curve_trades import curve_pos, on_the_run\n", "from analytics.index_data import get_index_quotes\n", "from analytics.scenarios import run_portfolio_scenarios, run_swaption_scenarios\n", "from analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface, DualCorrTranche\n", "from db import dbconn, dbengine\n", "\n", "conn = dbconn('dawndb')\n", "dawndb = dbengine('dawndb')\n", "serenitasdb = dbengine('serenitasdb')\n", "analytics.init_ontr()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#PNL Allocation\n", "#date = most recent business day; report_date = the month-end before it (reporting period)\n", "date = datetime.date.today() - off.BDay(1)\n", "report_date = date - off.MonthEnd(1)\n", "report_date" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#Find the strategies that are not defined: undefined needs to be mapped in strat_map\n", "strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n", "nav = go.get_net_navs()\n", "m_pnl = go.get_monthly_pnl(['strat', 'custacctname'])\n", "m_pnl = m_pnl.reset_index().merge(strats, on=['strat', 'custacctname'], how='left')\n", "undefined = m_pnl[m_pnl.pnl.isna()].groupby(['strat', 'custacctname']).last()\n", "#any (strat, custacctname) pairs left in 'undefined' are missing from strat_map.csv and are\n", "#excluded from the allocation below - add them to the csv before trusting pnl_alloc\n", "#Get PNL Allocation\n", "#Input latest NAVS to: '/home/serenitas/edwin/Python/subscription_fee_data.csv'\n", "pnl_alloc = m_pnl.groupby(['date', 'pnl']).sum()\n", "pnl_alloc = pnl_alloc.join(nav.begbooknav)\n", "pnl_alloc['strat_return'] = pnl_alloc.mtdtotalbookpl / pnl_alloc.begbooknav\n", "#rolling 12 months PNL per strategy - copy to RiskMonitor\n", "start_date = report_date - 
off.MonthEnd(11)\n", "rolling_return = pnl_alloc[start_date:report_date].groupby('pnl').sum()['strat_return']\n", "rolling_return.to_clipboard()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#Average Portfolio Sales Turnover - as of last monthend from today\n", "#(total Bond Sales Proceeds + paydown)/average starting 12 months NAV\n", "nav = go.get_net_navs()\n", "sql_string = \"SELECT * FROM bonds where buysell = 'False'\"\n", "#parse_dates: per pandas docs, to_datetime keyword args must be passed as a dict, not a string\n", "df = pd.read_sql_query(sql_string, dbengine('dawndb'),\n", " parse_dates={'lastupdate': {'utc': True}, 'trade_date':'', 'settle_date':''},\n", " index_col = 'trade_date')\n", "df = df.groupby(pd.Grouper(freq='M')).sum()\n", "#Now get portfolio paydown per month\n", "portfolio = go.get_portfolio()\n", "portfolio = portfolio[(portfolio.custacctname == 'V0NSCLMAMB') &\n", " (portfolio.port == 'MORTGAGES') &\n", " (portfolio.identifier != 'USD') &\n", " (portfolio.endqty != 0)]\n", "portfolio = portfolio.set_index('identifier', append=True)\n", "portfolio = portfolio['endqty'].groupby(['identifier', 'periodenddate']).sum()\n", "portfolio = portfolio.reset_index('identifier') \n", "sql_string = \"SELECT * from cashflow_history\"\n", "cf = pd.read_sql_query(sql_string, dbengine('dawndb'), parse_dates=['date'],\n", " index_col=['date']).sort_index()\n", "df_1 = pd.merge_asof(cf, portfolio.sort_index(), left_index=True, right_index=True, by='identifier')\n", "df_1 = df_1.dropna(subset=['endqty'])\n", "df_1 = df_1[(df_1.principal_bal != 0) & (df_1.principal != 0)]\n", "#vectorized (was a row-wise apply whose lambda arg shadowed df): scale each cashflow by our held fraction\n", "df_1['paydown'] = df_1.endqty / df_1.principal_bal * df_1.principal\n", "paydowns = df_1.paydown.groupby(pd.Grouper(freq='M')).sum()\n", "temp = pd.concat([paydowns, df.principal_payment, df.accrued_payment], axis=1).fillna(0)\n", "turnover = temp.rolling(12).sum().sum(axis=1)/ nav.begbooknav.rolling(12).mean()\n", "#use .iloc - integer [] indexing on a datetime-indexed Series is deprecated (removed in pandas 2.x)\n", "turnover.iloc[12:].plot()\n", "turnover.iloc[-1]" ] }, { "cell_type": "code", 
"execution_count": null, "metadata": {}, "outputs": [], "source": [ "#Calculate amount of stress for reports\n", "#hoist the repeated on_the_run('HY') lookup\n", "otr = on_the_run('HY')\n", "df = get_index_quotes('HY', list(range(otr - 10, otr + 1)),\n", " tenor=['5yr'], years=5)\n", "df = df.xs('5yr', level='tenor')['close_spread'].groupby(['date', 'series']).last()\n", "\n", "widen, tighten = [], []\n", "#approximately 1, 3 and 6 month moves (22 business days per month)\n", "for days in [22, 66, 132]:\n", " calc = df.unstack().pct_change(freq=f'{days}B').stack().groupby('date').last()\n", " widen.append(calc.max())\n", " tighten.append(calc.min())\n", "pd.DataFrame([widen, tighten], columns=['1M', '3M', '6M'], index=['widen', 'tighten'])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "position_date = (datetime.date.today() - off.BDay(2)).date()\n", "spread_date = position_date\n", "shock_date = (datetime.date.today() - off.BDay(2)).date()\n", "(position_date, spread_date, shock_date)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "#Current tranche and swaptions positions\n", "#ntl_agg is a window sum netting buy/sell protection per (security_id, attach); zero-net positions are dropped below\n", "t_sql_string = (\"SELECT id, folder, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n", " \"OVER (partition by security_id, attach) AS ntl_agg \"\n", " \"FROM cds WHERE swap_type='CD_INDEX_TRANCHE' AND termination_cp IS NULL \"\n", " \"AND trade_date <= %s\")\n", "swaption_sql_string = (\"select id, folder, expiration_date from swaptions where date(expiration_date) \"\n", " \"> %s and swap_type = 'CD_INDEX_OPTION' \"\n", " \"AND trade_date <= %s AND termination_date iS NULL\")\n", "index_sql_string = (\"SELECT id, folder, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n", " \"OVER (partition by security_id, attach) AS ntl_agg \"\n", " \"FROM cds WHERE swap_type='CD_INDEX' AND termination_cp IS null \"\n", " \"AND trade_date <= %s AND maturity > %s\")\n", "with conn.cursor() as c:\n", " 
c.execute(t_sql_string, (position_date,))\n", " t_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n", " c.execute(swaption_sql_string, (position_date, position_date))\n", " swaption_trades = [[dealid, f\"{folder}_{dealid}\", expiration_date] for dealid, folder, expiration_date in c]\n", " c.execute(index_sql_string, (position_date, position_date))\n", " index_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n", " \n", "#assemble the scenario portfolio: tranches first, then swaptions still alive at shock_date, then live index trades\n", "portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid, _ in t_trades],\n", " [trade_id for _, trade_id in t_trades])\n", "for trade_id, name, expiration_date in swaption_trades:\n", " if expiration_date > shock_date:\n", " portf.add_trade(BlackSwaption.from_tradeid(trade_id), name)\n", "for trade_id, name in index_trades:\n", " portf.add_trade(CreditIndex.from_tradeid(trade_id), name)\n", " \n", "#get bond risks:\n", "rmbs_pos = go.rmbs_pos(position_date)\n", "clo_pos = go.clo_pos(position_date)\n", "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n", " spread_date)\n", "duration, = next(r)\n", "#hy_equiv = delta_yield / HY on-the-run duration * 100: HY-index-equivalent notional of the bond book\n", "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n", "notional = rmbs_pos['hy_equiv'].sum() + clo_pos['hy_equiv'].sum()\n", "#hedge the bond book with a short HY index position of the equivalent notional\n", "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', \n", " value_date = spread_date, \n", " notional = -notional), 'bonds')\n", " \n", "portf.value_date = spread_date\n", "portf.mark(interp_method=\"bivariate_linear\")\n", "portf.reset_pv()\n", "\n", "#one payer vol surface per (index, series) in the book; vs.list(...)[-1] takes the last listed\n", "#surface - presumably the most recent; TODO confirm vs.list() ordering\n", "vol_surface = {}\n", "for trade in portf.swaptions:\n", " vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n", " value_date=spread_date, interp_method = \"bivariate_linear\")\n", " vol_surface[(trade.index.index_type, trade.index.series)] = vs[vs.list(option_type='payer')[-1]]\n", "#scenario grid: no vol shock, correlation flat or -10 points, spread shocks from the historical tighten/widen moves computed above\n", "vol_shock = [0]\n", "corr_shock = [0, -.1]\n", "spread_shock = tighten + [0] + widen\n", "date_range = [pd.Timestamp(shock_date)]\n", 
"\n", "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n", " spread_shock=spread_shock,\n", " vol_shock=vol_shock,\n", " corr_shock=corr_shock,\n", " vol_surface=vol_surface)\n", "\n", "attrib = scens.stack(level=0).reset_index()\n", "#strip the trailing _<dealid> from the trade label to recover the strategy name\n", "#(raw string: \\d is an invalid escape sequence in a plain string literal)\n", "attrib['strategy'] = attrib['level_4'].str.split(r'_\\d+', expand=True).iloc[:,0]\n", "attrib = attrib.set_index(['spread_shock', 'strategy', 'corr_shock'])\n", "attrib = attrib.groupby(['spread_shock', 'strategy', 'corr_shock']).sum()\n", "\n", "#PnL per strategy at the 1M max-widening spread shock with correlation shocked -0.1\n", "results = attrib.xs((widen[0], -0.1), level = ['spread_shock','corr_shock'])\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.1" } }, "nbformat": 4, "nbformat_minor": 2 }