Diffstat (limited to 'python/notebooks/Reto Report.ipynb')
| -rw-r--r-- | python/notebooks/Reto Report.ipynb | 67 |
1 file changed, 7 insertions(+), 60 deletions(-)
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 75156c99..1d0cc038 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -13,16 +13,10 @@
 "import numpy as np\n",
 "\n",
 "from pandas.tseries.offsets import BDay, MonthEnd\n",
- "from analytics.index_data import get_index_quotes\n",
 "from analytics.scenarios import run_portfolio_scenarios\n",
- "from analytics.utils import run_local\n",
- "from analytics import BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, DualCorrTranche\n",
- "from copy import deepcopy\n",
- "from analytics.curve_trades import on_the_run\n",
 "from risk.bonds import subprime_risk\n",
 "from utils.db import dbconn, dbengine, serenitas_engine, dawn_engine\n",
 "from risk.portfolio import build_portfolio, generate_vol_surface\n",
- "from analytics.tranche_basket import DualCorrTranche, TrancheBasket, MarkitTrancheBasket, Skew\n",
 "from analytics.basket_index import BasketIndex"
 ]
 },
@@ -46,9 +40,10 @@
 "outputs": [],
 "source": [
 "#Stress scenario for weekly report --> copy paste results to Excel\n",
+ "fund = 'SERCGMAST'\n",
 "spread_shock = np.array([100., 200.])\n",
 "spread_shock /= analytics._ontr['HY'].spread\n",
- "portf, _ = build_portfolio(position_date, spread_date)\n",
+ "portf, _ = build_portfolio(position_date, spread_date, fund)\n",
 "vol_surface = generate_vol_surface(portf, 5)\n",
 "\n",
 "portf.reset_pv()\n",
@@ -72,7 +67,7 @@
 "outputs": [],
 "source": [
 "################################### JTD\n",
- "_, portf = build_portf(position_date, spread_date)\n",
+ "_, portf = build_portfolio(position_date, spread_date)\n",
 "jtd_i = []\n",
 "for t in portf.indices:\n",
 " bkt = BasketIndex(t.index_type, t.series, [t.tenor])\n",
@@ -87,7 +82,7 @@
 "ref_names = pd.read_sql_query(\"select ticker, referenceentity from refentity\", dbconn('serenitasdb'), index_col='ticker')\n",
 "jump = pd.concat([pd.concat(jtd_t), pd.concat(jtd_i)])\n",
 "jump = jump.merge(ref_names, left_index=True, right_index=True)\n",
- "jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)"
+ "jump = jump.groupby('referenceentity').agg({'spread': np.mean, 'jtd': np.sum}).sort_values(by='jtd', ascending=True)"
 ]
 },
 {
@@ -109,7 +104,7 @@
 " vol_surface=vol_surface)\n",
 "\n",
 "pnl = scens.xs('pnl', axis=1, level=2)\n",
- "pnl = pnl.xs((vol_shock, corr_shock), level=['vol_shock', 'corr_shock'])\n",
+ "pnl = pnl.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
 "\n",
 "scenarios = (pnl.\n",
 " reset_index(level=['date'], drop=True).\n",
@@ -189,7 +184,8 @@
 "df_1['paydown'] = df_1.apply(lambda df: df.endqty/df.principal_bal * df.principal, axis=1)\n",
 "paydowns = df_1.paydown.groupby(pd.Grouper(freq='M')).sum()\n",
 "temp = pd.concat([paydowns, df.principal_payment, df.accrued_payment], axis=1).fillna(0)\n",
- "turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()"
+ "turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()\n",
+ "turnover"
 ]
 },
 {
@@ -407,27 +403,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "#Historical max widening\n",
- "df = get_index_quotes('HY', list(range(on_the_run('HY', spread_date) - 10, on_the_run('HY', spread_date) + 1)),\n",
- " tenor=['5yr'], years=5)\n",
- "df = df.xs('5yr', level='tenor')['close_spread'].groupby(['date', 'series']).last()\n",
- "df=df.loc[:'2020-2-28']\n",
- "\n",
- "widen, tighten = [], []\n",
- "#approximately 1,3,6 months move (22 each months)\n",
- "for days in [22, 66, 132]: \n",
- " calc = df.unstack().pct_change(freq= str(days)+'B').stack().groupby('date').last()\n",
- " widen.append(calc.max())\n",
- " tighten.append(calc.min())\n",
- "pd.DataFrame([widen, tighten], columns=['1M', '3M', '6M'], index=['widen', 'tighten'])"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
 "################################## Historical Notioinals and HY Equiv\n",
 "dates = pd.date_range(datetime.date(2013, 1, 30), datetime.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
 "#look for a day with HY quotes... we need that to construct HY Equiv\n",
@@ -442,34 +417,6 @@
 " d = d.date()\n",
 " portfs[d] = build_portf(d)"
 ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
 }
 ],
 "metadata": {
