Diffstat (limited to 'python')
-rw-r--r--  python/globeop_reports.py            4
-rw-r--r--  python/notebooks/Reto Report.ipynb  103
2 files changed, 80 insertions, 27 deletions
diff --git a/python/globeop_reports.py b/python/globeop_reports.py
index a49cae6c..2fe60c8e 100644
--- a/python/globeop_reports.py
+++ b/python/globeop_reports.py
@@ -15,7 +15,7 @@ def get_monthly_pnl(group_by=["identifier"]):
df_pnl = pd.read_sql_query(
sql_string, dawn_engine, parse_dates=["date"], index_col=["date"]
)
- df_pnl["identifier"] = df_pnl.invid.str.replace("_A$", "")
+ df_pnl["identifier"] = df_pnl.invid.str.replace("_A$", "", regex=True)
pnl_cols = [
"bookunrealmtm",
"bookrealmtm",
@@ -49,7 +49,7 @@ def get_portfolio(report_date=None):
parse_dates=["periodenddate"],
index_col=["periodenddate"],
)
- df["identifier"] = df.invid.str.replace("_A$", "")
+ df["identifier"] = df.invid.str.replace("_A$", "", regex=True)
return df
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 542d324c..bda76756 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -11,15 +11,17 @@
"import pandas as pd\n",
"import numpy as np\n",
"\n",
- "from pandas.tseries.offsets import BDay, MonthEnd\n",
+ "from pandas.tseries.offsets import BDay, MonthEnd, CustomBusinessMonthEnd\n",
"\n",
"from risk.bonds import subprime_risk, crt_risk, clo_risk\n",
"from risk.portfolio import build_portfolio, generate_vol_surface\n",
"\n",
"import serenitas.analytics as ana\n",
+ "from serenitas.analytics.index_data import load_all_curves\n",
"from serenitas.analytics.scenarios import run_portfolio_scenarios\n",
"from serenitas.analytics.basket_index import BasketIndex\n",
- "from serenitas.utils.db import dbconn, dbengine, serenitas_engine, dawn_engine"
+ "from serenitas.utils.db import dbconn, dbengine, serenitas_engine, dawn_engine, serenitas_pool\n",
+ "#from dates import bond_cal"
]
},
{
@@ -30,9 +32,10 @@
"source": [
"#Set dates\n",
"position_date = (datetime.date.today() - BDay(1)).date()\n",
- "spread_date = position_date\n",
+ "spread_date = (datetime.date.today() - BDay(1)).date()\n",
"ana._local = False\n",
- "ana.init_ontr(spread_date)"
+ "ana.init_ontr(spread_date)\n",
+ "fund='SERCGMAST'"
]
},
{
@@ -42,10 +45,10 @@
"outputs": [],
"source": [
"################################### Run scenarios\n",
- "spread_shock = np.array([-100., -25., 1., +25. , 100., 200])\n",
+ "spread_shock = np.array([-100., -25., 1., +25. , 100., 200., 300.])\n",
"spread_shock /= ana._ontr['HY'].spread\n",
- "portf, _ = build_portfolio(position_date, spread_date)\n",
- "vol_surface = generate_vol_surface(portf, 5)\n",
+ "portf, _ = build_portfolio(position_date, spread_date, fund)\n",
+ "vol_surface = generate_vol_surface(portf, 10, 'MS')\n",
"portf.reset_pv()\n",
"scens = run_portfolio_scenarios(portf, date_range=[pd.Timestamp(spread_date)], params=['pnl', 'hy_equiv'],\n",
" spread_shock=spread_shock,\n",
@@ -61,8 +64,11 @@
" 'EUSNR', 'EUMEZ', 'EUINX', 'EUEQY', \n",
" 'XOSNR', 'XOMEZ', 'XOINX', 'XOEQY', \n",
" 'BSPK']\n",
- "\n",
- "scens = scens.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
+ "if fund == 'BRINKER': \n",
+ " scens = scens.xs(0, level='corr_shock')\n",
+ "else:\n",
+ " scens = scens.xs((0.0, 0.0), level=['vol_shock', 'corr_shock'])\n",
+ " \n",
"scens.columns.names=['strategy', 'trade_id', 'scen_type']\n",
"\n",
"results = {}\n",
@@ -93,6 +99,26 @@
"metadata": {},
"outputs": [],
"source": [
+ "#####our jump risks\n",
+ "jtd = portf.jtd_single_names()\n",
+ "conn = serenitas_pool.getconn()\n",
+ "surv_curves = load_all_curves(conn, spread_date)\n",
+ "serenitas_pool.putconn(conn)\n",
+ "surv_curves['spread'] = surv_curves['curve'].apply(lambda sc: sc.to_series(forward=False)[5] * (1-sc.recovery_rates[5]))\n",
+ "jtd_sabo = jtd[[jtd.columns[0]]].join(surv_curves.groupby(level=0).first()[['name', 'company_id', 'spread']])\n",
+ "jtd_sabo.columns = ['jtd', 'name', 'company_id', 'spread']\n",
+ "jtd_sabo = jtd_sabo.groupby(['company_id', 'name']).sum()\n",
+ "jtd_sabo = jtd_sabo.sort_values('spread', ascending=False)\n",
+ "top_5_avg_loss = jtd_sabo.nlargest(5, columns='jtd')['jtd'].mean()\n",
+ "widest_5_total_loss = jtd_sabo.nlargest(5, columns='spread')['jtd'].sum()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"################################### Find the strategies that are not defined: undefined needs to be mapped in strat_map\n",
"strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n",
"nav = go.get_net_navs()\n",
@@ -117,9 +143,8 @@
"metadata": {},
"outputs": [],
"source": [
- "################################### Average Portfolio Sales Turnover - as of last monthend from today\n",
- "#(total Bond Sales Proceeds + paydown)/average starting 12 months NAV\n",
- "#Actually: Rolling 12 months sum of (total bond sales proceeds + paydown)/monthly NAV\n",
+ "################################### Average Portfolio Sales Turnover\n",
+ "#Rolling min(month from inception, 12 months) sum of (total bond sales proceeds + paydown)/monthly NAV\n",
"nav = go.get_net_navs()\n",
"fund='SERCGMAST'\n",
"sql_string = \"SELECT * FROM bond_trades WHERE NOT buysell and fund = %s\"\n",
@@ -135,7 +160,9 @@
"portfolio = portfolio[(portfolio.custacctname == 'V0NSCLMAMB') &\n",
" (portfolio.identifier != 'USD') &\n",
" (portfolio.endqty != 0)]\n",
- "cf = pd.read_sql_query(\"SELECT * FROM cashflow_history\", dawn_engine,\n",
+ "cf = pd.read_sql_query(\"select date, principal_bal, principal, cusip as identifier \"\n",
+ " \"from cashflow_history a left join \"\n",
+ " \"(select figi, cusip from securities) b on a.identifier = b.figi\", dawn_engine,\n",
" parse_dates=['date'],\n",
" index_col=['date']).sort_index()\n",
"portfolio = portfolio.set_index('identifier', append=True)\n",
@@ -147,7 +174,7 @@
"df_1['paydown'] = df_1.apply(lambda df: df.endqty/df.principal_bal * df.principal, axis=1)\n",
"paydowns = df_1.paydown.groupby(pd.Grouper(freq='M')).sum()\n",
"temp = pd.concat([paydowns, df.principal_payment, df.accrued_payment], axis=1).fillna(0)\n",
- "turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12).sum()\n",
+ "turnover = (temp.sum(axis=1)/nav.begbooknav).rolling(12, min_periods=1).sum()\n",
"turnover"
]
},
@@ -210,7 +237,7 @@
"source": [
"################################### Average Traded Volume\n",
"nav = go.get_net_navs()\n",
- "sql_string = \"SELECT * FROM bonds where fund='SERCGMAST'\"\n",
+ "sql_string = \"SELECT * FROM bond_trades where fund='SERCGMAST'\"\n",
"bond_trades = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
" index_col = 'trade_date')\n",
@@ -249,7 +276,7 @@
"#Time series of bond portfolio age (portfolio date - latest buy date of position) - weighted by MV of all bonds.\n",
"#Problem is if we buy the same position again it resets to the holding period to 0\n",
"nav = go.get_net_navs()\n",
- "sql_string = \"SELECT * FROM bonds where fund = 'SERCGMAST' order by trade_date desc\"\n",
+ "sql_string = \"SELECT * FROM bond_trades where fund = 'SERCGMAST' order by trade_date desc\"\n",
"df = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
" index_col = 'trade_date')\n",
@@ -275,14 +302,24 @@
"outputs": [],
"source": [
"################################## Calculate Historical Bond Duration/Yield\n",
- "conn = dbconn(\"dawndb\")\n",
+ "dawnconn = dbconn(\"dawndb\")\n",
"fund = 'SERCGMAST'\n",
- "dates = pd.date_range(datetime.date(2013, 1, 30), datetime.date.today() - MonthEnd(1), freq=\"M\")\n",
- "bond_stats = {}\n",
+ "CBM = CustomBusinessMonthEnd(calendar=np.busdaycalendar())\n",
+ "dates = pd.bdate_range(\"2015-1-31\", datetime.date.today() - MonthEnd(1), \n",
+ " freq=CBM)\n",
+ "bond_stats=pd.DataFrame()\n",
"for d in dates:\n",
- " g = subprime_risk(pd.to_timestamp(d), conn, dbengine(\"rmbs_model\"), fund=fund)\n",
- " bond_stats[d, 'dur'] = sum(g.notional * g.factor * g.modDur)/sum(g.notional * g.factor)\n",
- " bond_stats[d, 'yield'] = sum(g.usd_market_value * g.modDur * g.bond_yield) /sum(g.usd_market_value * g.modDur)"
+ " sub = subprime_risk(d.date(), dawnconn , dbengine(\"rmbs_model\"), fund=fund)\n",
+ " sub=sub[sub.pv1>0]\n",
+ " crt = crt_risk(d.date(), dawnconn , dbengine(\"crt\"), fund=fund)\n",
+ " clo = clo_risk(d.date(), dawnconn , dbconn(\"etdb\"), fund=fund)\n",
+ " bonds = pd.concat([sub,crt,clo]).dropna(subset=['modDur', 'usd_market_value'])\n",
+ " bond_stats.at[d, 'dur']= sum(bonds.notional * bonds.factor * bonds.modDur)/sum(bonds.notional * bonds.factor)\n",
+ " bond_stats.at[d, 'yield'] = (sum(bonds.usd_market_value * bonds.modDur * bonds.bond_yield) /\n",
+ " sum(bonds.usd_market_value * bonds.modDur))\n",
+ " bond_stats.at[d, 'swap_rate'] = (sum(bonds.usd_market_value * bonds.modDur * bonds.swap_rate) /\n",
+ " sum(bonds.usd_market_value * bonds.modDur))\n",
+ "bond_stats['dm'] = bond_stats['yield'] - bond_stats['swap_rate']"
]
},
{
@@ -375,6 +412,7 @@
"outputs": [],
"source": [
"################################## historical cash balances: NAV - bondNAV - IA - IM\n",
+ "#Make sure every strategy is defined\n",
"nav = go.get_net_navs()\n",
"portf = go.get_portfolio()\n",
"strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n",
@@ -386,6 +424,7 @@
"df = df[~df['invid'].isin(['USDLOAN', 'EURLOAN'])]\n",
"df = df[~df['port'].isin(['SER_TEST__SER_TEST', 'GFS_HELPER_BUSINESS_UNIT'])]\n",
"df = df.reset_index().merge(strats, on=['strat', 'custacctname'], how='left')\n",
+ "df = df.dropna(subset=['capital'])\n",
"bondNAV = df[df['capital'].str.contains('Bonds')].groupby(['periodenddate'])['endbooknav'].sum()\n",
"bondNAV.name = 'bondNAV'\n",
"\n",
@@ -410,8 +449,8 @@
"\n",
"#now get it all together\n",
"nav = nav.join(bondNAV, how='left')\n",
- "nav = nav.merge(im, left_on='bdate_end', right_index=True)\n",
- "nav = nav.merge(ia, left_on='bdate_end', right_index=True)\n",
+ "nav = nav.merge(im, left_on='bdate_end', right_index=True, how='left')\n",
+ "nav = nav.merge(ia, left_on='bdate_end', right_index=True, how='left')\n",
"nav['fcash'] = nav['endbooknav'] - nav[['im', 'initial_margin', 'bondNAV']].sum(axis=1)\n",
"nav['fcashPercent'] = nav['fcash']/nav['endbooknav']"
]
@@ -477,6 +516,20 @@
"metadata": {},
"outputs": [],
"source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
}
],
"metadata": {
@@ -495,7 +548,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.9.1-final"
+ "version": "3.10.2"
}
},
"nbformat": 4,