Diffstat (limited to 'python/notebooks')
-rw-r--r--  python/notebooks/Reto Report.ipynb | 100
1 file changed, 90 insertions(+), 10 deletions(-)
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index a03c2e0f..542d324c 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -84,6 +84,15 @@
"metadata": {},
"outputs": [],
"source": [
+ "results.to_clipboard()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"################################### Find the strategies that are not defined: undefined needs to be mapped in strat_map\n",
"strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n",
"nav = go.get_net_navs()\n",
@@ -113,7 +122,7 @@
"#Actually: Rolling 12 months sum of (total bond sales proceeds + paydown)/monthly NAV\n",
"nav = go.get_net_navs()\n",
"fund='SERCGMAST'\n",
- "sql_string = \"SELECT * FROM bonds WHERE NOT buysell and fund = %s\"\n",
+ "sql_string = \"SELECT * FROM bond_trades WHERE NOT buysell and fund = %s\"\n",
"df = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':{'utc':True}, 'trade_date': {}, 'settle_date':{}},\n",
" params=[fund,],\n",
@@ -289,12 +298,32 @@
" lambda df: df.loc[df.index[-1]]\n",
" )\n",
"df = df[~df['invid'].isin(['USDLOAN', 'EURLOAN'])]\n",
- "pvs = df['endbooknav'].groupby(['periodenddate', df['endbooknav'] >0]).sum().unstack().rename(\n",
+ "df = df[~df['port'].isin(['SER_TEST__SER_TEST', 'GFS_HELPER_BUSINESS_UNIT'])]\n",
+ "pvs = df.groupby(['periodenddate', df['endbooknav'] >0])['endbooknav'].sum().unstack().rename(\n",
" columns={True:'endbooknav_pos', False:'endbooknav_neg'})\n",
"nav = nav.merge(pvs, left_index=True, right_index=True)\n",
"nav['long_leverage'] = nav['endbooknav_pos']/nav.endbooknav\n",
"nav['gross_leverage'] = (nav['endbooknav_pos']-nav['endbooknav_neg'])/nav.endbooknav\n",
- "print (\"positive pv/nav: \" + str(nav['long_leverage'].mean()), \"gross pv/nav: \" + str(nav['gross_leverage'].mean()))"
+ "print (\"positive pv/nav: \" + str(nav['long_leverage'].mean()), \"gross pv/nav: \" + str(nav['gross_leverage'].mean()))\n",
+ "################################### Broken out by stratey too\n",
+ "pvs = df.groupby(['periodenddate', 'port', df['endbooknav'] >0])['endbooknav'].sum().unstack().rename(\n",
+ " columns={True:'endbooknav_pos', False:'endbooknav_neg'}).unstack(level=1)\n",
+ "data={}\n",
+ "for name in ['endbooknav_pos', 'endbooknav_neg']:\n",
+ " pv_gross = pvs.xs(name, level = 'endbooknav', axis=1)\n",
+ " pv_gross.loc[:, 'TRANCHE'] = pv_gross[['IG', 'HY', 'LQD_TRANCH','TRANCHE']].fillna(0).sum(axis=1)\n",
+ " pv_gross.rename({'CURVE': 'CREDIT CURVES',\n",
+ " 'HEDGE_MAC': 'MACRO HEDGE',\n",
+ " 'IR': 'INTEREST RATE DERIVATIVES',\n",
+ " 'MORTGAGES': 'MORTGAGE BONDS',\n",
+ " 'OPTIONS': 'CREDIT OPTIONS',\n",
+ " 'STRUCTURED': 'CSO BONDS',\n",
+ " 'TRANCHE': 'CREDIT TRANCHES'}, axis=1, inplace=True)\n",
+ " pv_gross.drop(['LQD_TRANCH', 'IG', 'HY'], inplace=True, axis=1)\n",
+ " pv_gross = pv_gross.merge(nav['endbooknav'], left_index=True, right_index=True)\n",
+ " data[name] = pv_gross.iloc[:,:-1].div(pv_gross['endbooknav'], axis=0)\n",
+ "data['endbooknav_pos'].to_clipboard()\n",
+ "data['endbooknav_neg'].to_clipboard()"
]
},
{
@@ -345,19 +374,70 @@
"metadata": {},
"outputs": [],
"source": [
+ "################################## historical cash balances: NAV - bondNAV - IA - IM\n",
+ "nav = go.get_net_navs()\n",
+ "portf = go.get_portfolio()\n",
+ "strats = pd.read_csv('/home/serenitas/edwin/Python/strat_map.csv')\n",
+ "nav['bdate_end'] = pd.bdate_range(start=nav.index.min(), end=nav.index.max(), freq=\"BM\")\n",
+ "\n",
+ "df = portf.groupby(pd.Grouper(freq=\"M\"), group_keys=False).apply(\n",
+ " lambda df: df.loc[df.index[-1]]\n",
+ " )\n",
+ "df = df[~df['invid'].isin(['USDLOAN', 'EURLOAN'])]\n",
+ "df = df[~df['port'].isin(['SER_TEST__SER_TEST', 'GFS_HELPER_BUSINESS_UNIT'])]\n",
+ "df = df.reset_index().merge(strats, on=['strat', 'custacctname'], how='left')\n",
+ "bondNAV = df[df['capital'].str.contains('Bonds')].groupby(['periodenddate'])['endbooknav'].sum()\n",
+ "bondNAV.name = 'bondNAV'\n",
+ "\n",
+ "#now get IM - note need to deal with EUR, it is not correct\n",
+ "fx = pd.read_sql_query(\"select date, eurusd from fx\", dawn_engine, parse_dates='date', index_col = 'date')\n",
+ "sql_string = \"select date, currency, current_im from fcm_moneyline where currency <> 'ZZZZZ'\"\n",
+ "im = pd.read_sql_query(sql_string, dawn_engine, parse_dates='date', index_col = 'date').join(fx)\n",
+ "im['im'] = im.apply(lambda x: x.eurusd * x.current_im if x.currency == 'EUR' else x.current_im, axis =1)\n",
+ "im = im['im'].groupby('date').sum()\n",
+ "\n",
+ "#now get IA - from tranches, swaptions and interest rate derivatives\n",
+ "ia = pd.DataFrame()\n",
+ "sqlt = \"SELECT initial_margin FROM tranche_risk_agg(%s)\"\n",
+ "sqls = \"SELECT initial_margin from list_swaption_positions_and_risks(%s)\"\n",
+ "for d in nav['bdate_end']:\n",
+ " for s in [sqlt, sqls]:\n",
+ " temp = pd.read_sql_query(s, dawn_engine, params=[d.date(),])\n",
+ " temp['date'] = d.date()\n",
+ " ia = ia.append(temp)\n",
+ "ia = ia.groupby('date').sum()\n",
+ "ia.index = pd.to_datetime(ia.index)\n",
+ "\n",
+ "#now get it all together\n",
+ "nav = nav.join(bondNAV, how='left')\n",
+ "nav = nav.merge(im, left_on='bdate_end', right_index=True)\n",
+ "nav = nav.merge(ia, left_on='bdate_end', right_index=True)\n",
+ "nav['fcash'] = nav['endbooknav'] - nav[['im', 'initial_margin', 'bondNAV']].sum(axis=1)\n",
+ "nav['fcashPercent'] = nav['fcash']/nav['endbooknav']"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"################################## Historical Notioinals and HY Equiv\n",
- "dates = pd.date_range(datetime.date(2013, 1, 30), datetime.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
+ "dates = pd.date_range(datetime.date(2019, 12, 30), datetime.datetime.today() - MonthEnd(1), freq=\"BM\")\n",
"#look for a day with HY quotes... we need that to construct HY Equiv\n",
"sql_string = 'select distinct(date) from index_quotes where index = %s order by date asc'\n",
"hy_dates = pd.read_sql_query(sql_string, serenitas_engine, parse_dates = 'date', params=['HY',])\n",
"def nearest(items, pivot):\n",
" return min(items, key=lambda x: abs(x - pivot))\n",
- "hy_dates.apply(lambda x: nearest(dates, x))\n",
- "pd.merge_asof(pd.DataFrame(dates), hy_dates, left_index=True, right_index=True)\n",
+ "#hy_dates.apply(lambda x: nearest(dates, x))\n",
+ "#pd.merge_asof(pd.DataFrame(dates), hy_dates, left_on='0', right_on='date')\n",
+ "dates = pd.merge_asof(pd.DataFrame(dates, columns=['date']), hy_dates, left_on='date', right_on='date')\n",
"portfs = {}\n",
- "for d in dates:\n",
+ "hye = {}\n",
+ "for d in dates.date:\n",
" d = d.date()\n",
- " portfs[d] = build_portf(d)"
+ " portfs[d], _ = build_portfolio(d, d)\n",
+ " hye[d] = portfs[d].hy_equiv"
]
},
{
@@ -401,9 +481,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3.8.1 64-bit",
+ "display_name": "Python 3.9.1 64-bit",
"language": "python",
- "name": "python38164bitc40c8740e5d542d7959acb14be96f4f3"
+ "name": "python39164bit6ddd573894c04d6a858a9a58880cc9d4"
},
"language_info": {
"codemirror_mode": {