-rw-r--r--  python/notebooks/Reto Report.ipynb  183
1 file changed, 27 insertions(+), 156 deletions(-)
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index d0c16dea..5a95d1e0 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -169,20 +169,6 @@
"metadata": {},
"outputs": [],
"source": [
- "from serenitas.utils.db2 import dbconn\n",
- "import datetime\n",
- "conn = dbconn('dawndb')\n",
- "from risk.tranches import get_tranche_portfolio\n",
- "tranches = get_tranche_portfolio(datetime.date(2023,2,17), conn, False)\n",
- "jtd = tranches.jtd_single_names()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
"################################### Find the strategies that are not defined: undefined needs to be mapped in strat_map\n",
"strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n",
"nav = go.get_net_navs()\n",
@@ -207,15 +193,6 @@
"metadata": {},
"outputs": [],
"source": [
- "undefined"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
"################################### Average Portfolio Sales Turnover\n",
"#Rolling min(month from inception, 12 months) sum of (total bond sales proceeds + paydown)/monthly NAV\n",
"warnings.filterwarnings('ignore')\n",
@@ -256,54 +233,19 @@
" fund=fund,\n",
" conn=dbconn(\"dawndb\"),\n",
" asset_class =ac)\n",
- "df_inst = pd.concat(df_inst)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "################################### Number of position (min/max/average) /position size (min/max/average) /Top 10 position size\n",
- "portfolio = go.get_portfolio()\n",
+ "df_inst = pd.concat(df_inst)\n",
"nav = go.get_net_navs()\n",
- "exc_port_list = [None, 'SERCGLLC__SERCGLLC', 'CASH', 'SERCGLTD__SERCGLTD', 'GFS_HELPER_BUSINESS_UNIT', 'SER_TEST__SER_TEST']\n",
- "exc_inst_list = ['CAD', 'CADF', 'SEREONUS', 'USD', 'USDF', 'USDLOAN', 'EUR', 'EURLOAN', 'USDCASHINT',\n",
- " 'USDLOANOLD', 'USDSWAPFEE', 'EURF','CADCASHINT','COMMISSIONFEES', 'EURCASHINT', 'COMMUNICATIONFEES']\n",
- "exc_inst_list2 = ['86359DUR6OLD2','004375DV0OLD4','32027GAD8OLD7','75406DAC7OLD7','86359DMN4OLD7','45661EAW4OLD7']\n",
- "\n",
- "portfolio = portfolio[~portfolio.port.isin(exc_port_list) &\n",
- " ~portfolio.identifier.isin(exc_inst_list) &\n",
- " ~portfolio.identifier.isin(exc_inst_list2)]\n",
- "\n",
- "all_positions = portfolio.groupby(['periodenddate', 'identifier'])['endbooknav'].sum() \n",
- "num_pos = all_positions.groupby('periodenddate').count()\n",
- "#min/max/mean number of positions\n",
- "num_pos.min(), num_pos.max(), num_pos.mean()\n",
- "\n",
- "bonds = portfolio[(portfolio.custacctname == 'V0NSCLMAMB') &\n",
- " (portfolio.identifier != 'USD') &\n",
- " (portfolio.endqty != 0) &\n",
- " (portfolio.port.isin(['MORTGAGES', 'STRUCTURED', 'CLO'])) &\n",
- " (~portfolio.strat.isin(['MBSCDS']))]\n",
- "\n",
- "monthend_bonds = bonds.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
- " lambda df: df.loc[df.index[-1]]\n",
- " )\n",
- "monthend_bonds = monthend_bonds.groupby(['periodenddate', 'identifier']).sum()\n",
- "nav.index.rename('periodenddate', inplace=True)\n",
- "monthend_bonds = monthend_bonds.merge(nav, left_index=True, right_index=True, suffixes=('_bond', '_fund'))\n",
- "monthend_bonds['percentage'] = monthend_bonds.endbooknav_bond/monthend_bonds.endbooknav_fund\n",
+ "monthend_bonds = df_inst.merge(nav['endbooknav'], left_on = 'date', right_on= 'date')\n",
+ "monthend_bonds['percentage'] = monthend_bonds.usd_market_value/monthend_bonds.endbooknav\n",
"last_date = monthend_bonds.index.get_level_values(0).max() \n",
"latest = monthend_bonds.loc[last_date]\n",
"#min/max/mean position size\n",
"latest['percentage'][latest['percentage']>0.0000001].min(), latest['percentage'].max(), latest['percentage'].mean()\n",
"#10 largest positions\n",
- "ten_largest = monthend_bonds.groupby('periodenddate').apply(lambda df: df['percentage'].nlargest(10).sum())\n",
+ "ten_largest = monthend_bonds.groupby('date').apply(lambda df: df['percentage'].nlargest(10).sum())\n",
"print(\"ten largest position - min/max/mean\", ten_largest.min(), ten_largest.max(), ten_largest.mean())\n",
"#5 largest positions in the last 5 years\n",
- "five_largest = monthend_bonds.groupby('periodenddate').apply(lambda df: df['percentage'].nlargest(5).sum())\n",
+ "five_largest = monthend_bonds.groupby('date').apply(lambda df: df['percentage'].nlargest(5).sum())\n",
"print(\"five largest position - min/max/mean\",\n",
" five_largest[datetime.date.today() - pd.DateOffset(years=5):].min(),\n",
" five_largest[datetime.date.today() - pd.DateOffset(years=5):].max(),\n",
@@ -316,23 +258,6 @@
"metadata": {},
"outputs": [],
"source": [
- "rmbs_pos = subprime_risk(position_date, conn, dbengine(\"rmbs_model\"), fund=fund)\n",
- "crt_pos = crt_risk(position_date, conn, fund=fund)\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
"################################### Average Traded Volume\n",
"nav = go.get_net_navs()\n",
"sql_string = \"SELECT * FROM bond_trades where fund='SERCGMAST'\"\n",
@@ -400,23 +325,32 @@
"outputs": [],
"source": [
"################################## Calculate Historical Bond Duration/Yield\n",
+ "from serenitas.utils.db import dbengine\n",
"fund = 'SERCGMAST'\n",
- "CBM = CustomBusinessMonthEnd(calendar=np.busdaycalendar())\n",
+ "bdd = np.busdaycalendar(holidays=['2021-05-31', \n",
+ " '2021-05-28',\n",
+ " '2021-05-27',\n",
+ " '2021-05-26',\n",
+ " '2020-12-31',\n",
+ " '2020-12-30'])\n",
+ "CBM = CustomBusinessMonthEnd(calendar=bdd)\n",
"dates = pd.bdate_range(\"2015-1-31\", datetime.date.today() - MonthEnd(1), \n",
" freq=CBM)\n",
"bond_stats=pd.DataFrame()\n",
- "with dawn_pool.connection() as conn, dbconn(\"etdb\") as et_conn:\n",
- " for d in dates:\n",
- " sub = subprime_risk(d.date(), dawnconn , dbengine(\"rmbs_model\"), fund=fund)\n",
- " sub=sub[sub.pv1>0]\n",
- " crt = crt_risk(d.date(), dawnconn , dbengine(\"crt\"), fund=fund)\n",
- " clo = clo_risk(d.date(), dawnconn , dbconn(\"etdb\"), fund=fund)\n",
- " bonds = pd.concat([sub,crt,clo]).dropna(subset=['modDur', 'usd_market_value'])\n",
- " bond_stats.at[d, 'dur']= sum(bonds.notional * bonds.factor * bonds.modDur)/sum(bonds.notional * bonds.factor)\n",
- " bond_stats.at[d, 'yield'] = (sum(bonds.usd_market_value * bonds.modDur * bonds.bond_yield) /\n",
- " sum(bonds.usd_market_value * bonds.modDur))\n",
- " bond_stats.at[d, 'swap_rate'] = (sum(bonds.usd_market_value * bonds.modDur * bonds.swap_rate) /\n",
- " sum(bonds.usd_market_value * bonds.modDur))\n",
+ "for d in dates:\n",
+ " sub = subprime_risk(d.date(), dbconn('dawndb') , dbengine(\"rmbs_model\"), fund=fund)\n",
+ " sub=sub[sub.pv1>0]\n",
+ " crt = crt_risk(d.date(), dbconn('dawndb') , fund=fund)\n",
+ " clo = clo_risk(d.date(), dbconn('dawndb') , dbconn(\"etdb\"), fund=fund)\n",
+ " crt.rename(columns={'duration': 'modDur'}, inplace=True)\n",
+ " bonds = pd.concat([sub,crt,clo]).dropna(subset=['modDur', 'usd_market_value'])\n",
+ " bonds['gross_carry'] = bonds.bond_yield * bonds.usd_market_value\n",
+ " bond_stats.at[d, 'gross_yield'] = sum(bonds.gross_carry)/sum(bonds.usd_market_value)\n",
+ " bond_stats.at[d, 'dur'] = sum(bonds.notional * bonds.factor * bonds.modDur)/sum(bonds.notional * bonds.factor)\n",
+ " bond_stats.at[d, 'yield'] = (sum(bonds.usd_market_value * bonds.modDur * bonds.bond_yield) /\n",
+ " sum(bonds.usd_market_value * bonds.modDur))\n",
+ " bond_stats.at[d, 'swap_rate'] = (sum(bonds.usd_market_value * bonds.modDur * bonds.swap_rate) /\n",
+ " sum(bonds.usd_market_value * bonds.modDur))\n",
"bond_stats['dm'] = bond_stats['yield'] - bond_stats['swap_rate']"
]
},
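The per-date statistics in the loop above are all weighted averages: duration is weighted by current face (notional * factor), while yield and swap rate are weighted by dollar duration (market value * modified duration). A compact sketch, assuming a `bonds` frame with the same columns used in the loop:

    import numpy as np

    def wavg(values, weights):
        """Weighted average, skipping rows where either input is NaN."""
        v, w = np.asarray(values, float), np.asarray(weights, float)
        mask = ~(np.isnan(v) | np.isnan(w))
        return (v[mask] * w[mask]).sum() / w[mask].sum()

    dur         = wavg(bonds.modDur,     bonds.notional * bonds.factor)           # face-weighted duration
    gross_yield = wavg(bonds.bond_yield, bonds.usd_market_value)                  # market-value-weighted yield
    port_yield  = wavg(bonds.bond_yield, bonds.usd_market_value * bonds.modDur)   # dollar-duration-weighted yield
    swap_rate   = wavg(bonds.swap_rate,  bonds.usd_market_value * bonds.modDur)
    dm          = port_yield - swap_rate                                          # spread over swaps
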
@@ -467,15 +401,6 @@
"metadata": {},
"outputs": [],
"source": [
- "bond_stats.to_clipboard()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
"################################## FX Exposure, any net CAD/EUR exposures are FX exposure. \n",
"################### doesn't add up to 1 including the USD as we now sum up all the NAVs after adjusting the Futures\n",
"nav = go.get_net_navs()\n",
@@ -616,60 +541,6 @@
"income.T.groupby(pd.Grouper(freq='A')).sum().T.sort_index().to_clipboard()\n",
"appreciation.T.groupby(pd.Grouper(freq='A')).sum().T.sort_index().to_clipboard()"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "#####our jump test\n",
- "from serenitas.analytics import DualCorrTranche\n",
- "from serenitas.analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface\n",
- "trade = DualCorrTranche('HY', 29, '5yr', attach=0, detach=15, corr_attach=np.nan, \n",
- " corr_detach=.35, tranche_running=100, notional=-10000000, use_trunc=True)\n",
- "portf = Portfolio([trade, ], ['trade', ])\n",
- "portf.mark()\n",
- "jtd = portf.jtd_single_names()\n",
- "conn = serenitas_pool.getconn()\n",
- "surv_curves = load_all_curves(conn, spread_date)\n",
- "serenitas_pool.putconn(conn)\n",
- "surv_curves['spread'] = surv_curves['curve'].apply(lambda sc: sc.to_series(forward=False)[5] * (1-sc.recovery_rates[5]))\n",
- "jtd_sabo = jtd[[jtd.columns[0]]].join(surv_curves.groupby(level=0).first()[['name', 'company_id', 'spread']])\n",
- "jtd_sabo.columns = ['jtd', 'name', 'company_id', 'spread']\n",
- "jtd_sabo = jtd_sabo.groupby(['company_id', 'name']).sum()\n",
- "jtd_sabo = jtd_sabo.sort_values('spread', ascending=False)\n",
- "top_5_avg_loss = jtd_sabo.nlargest(5, columns='jtd')['jtd'].mean()\n",
- "widest_5_total_loss = jtd_sabo.nlargest(5, columns='spread')['jtd'].sum()"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {