Diffstat (limited to 'python')
-rw-r--r--  python/globeop_reports.py                  | 158
-rw-r--r--  python/notebooks/Allocation Reports.ipynb  |   9
-rw-r--r--  python/notebooks/Reto Report.ipynb         |  46
-rw-r--r--  python/notebooks/VaR.ipynb                 |  37
4 files changed, 146 insertions(+), 104 deletions(-)
diff --git a/python/globeop_reports.py b/python/globeop_reports.py
index e656ae5f..2de3f13e 100644
--- a/python/globeop_reports.py
+++ b/python/globeop_reports.py
@@ -7,10 +7,14 @@ from quantlib.termstructures.yield_term_structure import YieldTermStructure
import pandas as pd
import numpy as np
+import datetime
+
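+# shared database engines, created once at import instead of once per query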
+etengine = dbengine('etdb')
+dawnengine = dbengine('dawndb')
def get_monthly_pnl(group_by=['identifier']):
sql_string = "SELECT * FROM pnl_reports"
- df_pnl = pd.read_sql_query(sql_string, dbengine('dawndb'),
+ df_pnl = pd.read_sql_query(sql_string, dawnengine,
parse_dates=['date'],
index_col=['date'])
df_pnl['identifier'] = df_pnl.invid.str.replace("_A$", "")
@@ -22,11 +26,11 @@ def get_monthly_pnl(group_by=['identifier']):
def get_portfolio(report_date=None):
if report_date is not None:
sql_string = "SELECT * FROM valuation_reports where periodenddate = %s"
- df = pd.read_sql_query(sql_string, dbengine('dawndb'), parse_dates=['periodenddate'],
+ df = pd.read_sql_query(sql_string, dawnengine, parse_dates=['periodenddate'],
index_col=['periodenddate'], params=[report_date,])
else:
sql_string = "SELECT * FROM valuation_reports"
- df = pd.read_sql_query(sql_string, dbengine('dawndb'), parse_dates=['periodenddate'],
+ df = pd.read_sql_query(sql_string, dawnengine, parse_dates=['periodenddate'],
index_col=['periodenddate'])
df['identifier'] = df.invid.str.replace("_A$", "")
return df
@@ -34,7 +38,7 @@ def get_portfolio(report_date=None):
def trade_performance():
sql_string = "SELECT * FROM bonds"
- df_trades = pd.read_sql_query(sql_string, dbengine('dawndb'),
+ df_trades = pd.read_sql_query(sql_string, dawnengine,
parse_dates={'lastupdate': {'utc': True},
'trade_date': {},
'settle_date': {}})
@@ -73,7 +77,7 @@ def trade_performance():
def get_net_navs():
sql_string = "SELECT * FROM valuation_reports"
- df_val = pd.read_sql_query(sql_string, dbengine('dawndb'), parse_dates=['periodenddate'])
+ df_val = pd.read_sql_query(sql_string, dawnengine, parse_dates=['periodenddate'])
nav = df_val[df_val.fund == 'SERCGMAST'].groupby('periodenddate')['endbooknav'].sum()
nav = nav.resample('M').last()
df = pd.read_csv('/home/serenitas/edwin/Python/subscription_fee_data.csv',
@@ -112,91 +116,97 @@ def calc_trade_performance_stats():
df[df.days_held.notnull()]['days_held'].groupby(pd.Grouper(freq='A')).mean()
-def get_rmbs_pos_df(date=None):
+def hist_pos(date=None, asset_class='rmbs'):
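+ # rebuilds the whole month-end history since 2013; the date argument is currently unused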
- engine = dbengine('dawndb')
end_date = pd.datetime.today() - MonthEnd(1)
-
- if date is not None:
- date = date + MonthEnd(0)
- df = get_portfolio(date)
- df = df.sort_index().loc[:end_date]
- df = df[(df.port == 'MORTGAGES') &
- (df.endbookmv > 0) &
- (df.custacctname == 'V0NSCLMAMB') &
- (df['invid'].str.len() >= 9)]
- sql_string = "SELECT distinct timestamp FROM priced where normalization = 'current_notional'"
- timestamps = pd.read_sql_query(sql_string, engine)
- df = df[['endbookmv', 'endlocalmarketprice', 'identifier']]
+ dates = pd.date_range(datetime.date(2013, 1, 31), end_date, freq='M')
calc_df = pd.DataFrame()
- yc = YieldTermStructure()
- libor1m = USDLibor(Period(1, Months), yc)
- for d, g in df.groupby(pd.Grouper(freq='M')):
- model_date = pd.to_datetime(timestamps[timestamps.timestamp <= d + DateOffset(days=1)].max()[0]).date()
- yc.link_to(YC(evaluation_date=model_date))
- libor = libor1m.fixing(libor1m.fixing_calendar.adjust(Date.from_datetime(d)))
- sql_string = ("SELECT date(timestamp) as timestamp, cusip, model_version, "
- "pv, moddur, delta_yield, delta_ir "
- "FROM priced where date(timestamp) = %s "
- "and model_version <> 2")
- params_list = (model_date,)
- if d > pd.datetime(2017, 9, 30):
- r = engine.execute("SELECT latest_sim FROM latest_sim(%s)", model_date)
- model_id, = next(r)
- #special case
- if model_date == pd.datetime(2017, 10, 27).date():
- model_id = 4
- sql_string += " AND model_version <>2 AND model_id_sub = %s"
- params_list += (model_id,)
- model = pd.read_sql_query(sql_string, engine, parse_dates=['timestamp'],
- params=params_list)
- model = model[model.pv != 0]
- comb_g = g.loc[d].groupby('identifier').agg({'endbookmv': 'sum',
- 'endlocalmarketprice': 'mean'})
- model = pd.merge(comb_g, model, left_on='identifier', right_on='cusip')
- positions = model.set_index(['cusip', 'model_version']).unstack(1).dropna()
- v1 = positions.xs(1, level='model_version', axis=1)
- v3 = positions.xs(3, level='model_version', axis=1)
- v3 = v3.assign(curr_ntl = v3.endbookmv/v3.endlocalmarketprice *100)
- v3 = v3.assign(b_yield = v3.moddur.apply(lambda x:
- float(yc.zero_rate(x)) - libor))
- v3.b_yield += np.minimum((v1.pv / v1.endlocalmarketprice * 100)
- ** (1/v1.moddur) - 1, 1).dropna()
- v3.delta_yield *= v3.endbookmv / v3.pv
- v3.delta_ir *= np.minimum(1, 1/v3.moddur) * (v3.endlocalmarketprice/100)/ v3.pv * v3.curr_ntl
- calc_df = calc_df.append(v3)
+ for d in dates:
+ if asset_class == 'rmbs':
+ calc_df = calc_df.append(rmbs_pos(d))
+ else:
+ calc_df = calc_df.append(clo_pos(d), sort=True)
+ return calc_df
- return calc_df.reset_index().set_index('timestamp').sort_index()
+def rmbs_pos(date):
+ date = date.date() if isinstance(date, pd.Timestamp) else date
-def get_clo_pos_df(date=None):
+ pos = get_portfolio(date)
+ pos = pos[(pos.port == 'MORTGAGES') &
+ (pos.endbookmv > 0) &
+ (pos.custacctname == 'V0NSCLMAMB') &
+ (pos['invid'].str.len() >= 9)]
+ pos = pos[['endbookmv', 'endlocalmarketprice', 'identifier']]
- etengine = dbengine('etdb')
- dawnengine = dbengine('dawndb')
- end_date = pd.datetime.today() - MonthEnd(1)
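+ # most recent model_version 1 pricing run in the 15 days before the report date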
+ sql_string = ("SELECT distinct timestamp FROM priced where "
+ "normalization = 'current_notional' and "
+ "model_version = 1 and "
+ "date(timestamp) < %s and date(timestamp) > %s "
+ "order by timestamp desc")
+ timestamps = pd.read_sql_query(sql_string, dawnengine, parse_dates=['timestamp'],
+ params=[date, date - DateOffset(15, 'D')])
+ model_date = (timestamps.loc[0][0]).date()
+
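+ # build the discount curve as of the model date; b_yield below is a spread over the 1m Libor fixing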
+ yc = YieldTermStructure()
+ libor1m = USDLibor(Period(1, Months), yc)
+ yc.link_to(YC(evaluation_date=model_date))
+ libor = libor1m.fixing(libor1m.fixing_calendar.adjust(Date.from_datetime(date)))
+ sql_string = ("SELECT date(timestamp) as timestamp, cusip, model_version, "
+ "pv, moddur, delta_yield, delta_ir "
+ "FROM priced where date(timestamp) = %s "
+ "and model_version <> 2")
+ params_list = (model_date,)
+ if date > datetime.date(2017, 9, 30):
+ r = dawnengine.execute("SELECT latest_sim FROM latest_sim(%s)",
+ model_date)
+ model_id, = next(r)
+ sql_string += " AND model_id_sub = %s"
+ params_list += (model_id,)
+ model = pd.read_sql_query(sql_string, dawnengine, parse_dates=['timestamp'],
+ params=params_list)
+ model = model[model['pv'] != 0]
+ comb_g = pos.loc[date].groupby('identifier').agg({'endbookmv': 'sum',
+ 'endlocalmarketprice': 'mean'})
+ model = pd.merge(comb_g, model, left_on='identifier', right_on='cusip')
+ positions = model.set_index(['cusip', 'model_version']).unstack(1).dropna()
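+ # the two model runs sit side by side per cusip: v1 feeds the yield pickup, v3 the pv and deltas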
+ v1 = positions.xs(1, level='model_version', axis=1)
+ v3 = positions.xs(3, level='model_version', axis=1)
+ v3 = v3.assign(curr_ntl=v3.endbookmv / v3.endlocalmarketprice * 100)
+ v3 = v3.assign(b_yield=v3.moddur.apply(lambda x:
+ float(yc.zero_rate(x)) - libor))
+ v3.b_yield += np.minimum((v1.pv / v1.endlocalmarketprice * 100)
+ ** (1/v1.moddur) - 1, 1).dropna()
+ v3.delta_yield *= v3.endbookmv / v3.pv
+ v3.delta_ir *= np.minimum(1, 1/v3.moddur) * \
+ (v3.endlocalmarketprice / 100) / v3.pv * v3.curr_ntl
+ return v3.reset_index().set_index('timestamp')
+
+def clo_pos(date):
+
+ date = date.date() if isinstance(date, pd.Timestamp) else date
- if date is not None:
- date = date + MonthEnd(0)
df = get_portfolio(date)
- df = df.sort_index().loc[:end_date]
df = df[(df.port == 'CLO') &
(df.endbookmv > 0) &
(df.custacctname == 'V0NSCLMAMB') &
(df['invid'].str.len() >= 9)]
df = df[['endbookmv', 'endlocalmarketprice', 'identifier']]
- sql_string = "select distinct cusip, identifier from bonds where asset_class = 'CLO'"
- cusip_map = {r['identifier']: r['cusip'] for r in dawnengine.execute(sql_string)}
- df['cusip'] = df['identifier'].replace(cusip_map)
- r = {}
- for d, g in df.groupby(pd.Grouper(freq='M')):
- cusips = g.loc[[g.index[-1]], 'cusip']
+
+ if df.empty:
+ return df
+ else:
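+ # map GlobeOp identifiers to cusips via the bonds table before pulling CLO risk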
+ sql_string = "select distinct cusip, identifier from bonds where asset_class = 'CLO'"
+ cusip_map = {r['identifier']: r['cusip'] for r in dawnengine.execute(sql_string)}
+ df['cusip'] = df['identifier'].replace(cusip_map)
+ cusips = df.loc[[df.index[-1]], 'cusip']
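+ # one bind parameter for the pricing date plus one per cusip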
placeholders = ",".join(["%s"] * (1 + len(cusips)))
sql_string = f"SELECT * FROM historical_cusip_risk({placeholders})"
model = pd.read_sql_query(sql_string, etengine, parse_dates=['pricingdate'],
- params=(d.date(), *cusips))
+ params=(date, *cusips))
model.index = cusips
- r[d] = g.loc[[g.index[-1]]].set_index('cusip').join(model)
- calc_df = pd.concat(r, names=['date', 'cusip'])
- calc_df['hy_equiv'] = calc_df.delta * calc_df.endbookmv
- return calc_df
+ calc_df = df.loc[[df.index[-1]]].set_index('cusip').join(model)
+ calc_df['hy_equiv'] = calc_df.delta * calc_df.endbookmv
+ calc_df['date'] = date
+ return calc_df.set_index('date')
diff --git a/python/notebooks/Allocation Reports.ipynb b/python/notebooks/Allocation Reports.ipynb
index b20f77d0..e64b84b0 100644
--- a/python/notebooks/Allocation Reports.ipynb
+++ b/python/notebooks/Allocation Reports.ipynb
@@ -245,8 +245,8 @@
"outputs": [],
"source": [
"#Positions and Risks\n",
- "rmbs_pos = go.get_rmbs_pos_df()\n",
- "clo_pos = go.get_clo_pos_df()"
+ "rmbs_pos = go.hist_pos(asset_class='rmbs')\n",
+ "clo_pos = go.hist_pos(asset_class='clo')"
]
},
{
@@ -302,8 +302,9 @@
"#RMBS Risk - need RMBS Positions and Risks\n",
"sql_string = \"select date, duration, series from on_the_run where index = 'HY'\"\n",
"duration = pd.read_sql_query(sql_string, Sengine, parse_dates=['date'], index_col=['date'])\n",
- "df = pd.merge_asof(rmbs_pos.sort_index(), duration, left_index=True, right_index=True)\n",
- "rmbs_hy_equiv = df.groupby('timestamp').apply(lambda df: sum(df.delta_yield/df.duration * 100))\n",
+ "rmbs_pos = pd.merge_asof(rmbs_pos.sort_index(), duration, left_index=True, right_index=True)\n",
+ "rmbs_pos['hy_equiv'] = rmbs_pos.delta_yield/rmbs_pos.duration * 100\n",
+ "rmbs_hy_equiv = rmbs_pos.groupby('timestamp')['hy_equiv'].sum()\n",
"#hy_equiv.plot()"
]
},
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 6e62b252..3b97de27 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -123,6 +123,17 @@
"metadata": {},
"outputs": [],
"source": [
+ "position_date = (datetime.date.today() - pd.tseries.offsets.BDay(1)).date()\n",
+ "shock_date = (datetime.date.today() - pd.tseries.offsets.BDay(2)).date()\n",
+ "(position_date, shock_date)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
"#Current tranche and swaptions positions\n",
"t_sql_string = (\"SELECT id, sum(notional * case when protection='Buyer' then -1 else 1 end) \"\n",
" \"OVER (partition by security_id, attach) AS ntl_agg \"\n",
@@ -138,50 +149,55 @@
" \"AND trade_date <= %s\")\n",
"with conn.cursor() as c:\n",
" #Get Tranche Trade Ids\n",
- " c.execute(t_sql_string, (date,))\n",
+ " c.execute(t_sql_string, (position_date,))\n",
" t_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
" #Get Swaption Trade Ids\n",
- " c.execute(swaption_sql_string, (date, date))\n",
+ " c.execute(swaption_sql_string, (position_date, position_date))\n",
" swaption_trades = c.fetchall()\n",
" #Get Index/deltas Trade Ids\n",
- " c.execute(index_sql_string, (date,))\n",
+ " c.execute(index_sql_string, (position_date,))\n",
" index_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
" \n",
"portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n",
- " t_trade_ids)\n",
+ " ['trn_'+ str(a) for a in t_trade_ids])\n",
"for row in swaption_trades:\n",
- " option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', date)\n",
+ " option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n",
" option_delta.mark()\n",
" portf.add_trade(BlackSwaption.from_tradeid(row[0], option_delta), 'opt_' + str(row[0]))\n",
"for index_id in index_trade_ids:\n",
" portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
" \n",
- "#Update manually - positive notional = long risk\n",
- "non_trancheSwap_risk_notional = 49119912 \n",
- "\n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = date, notional = -non_trancheSwap_risk_notional), 'bond')\n",
+ "#get bond risks:\n",
+ "rmbs_pos = go.rmbs_pos(position_date)\n",
+ "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
+ " shock_date)\n",
+ "duration, = next(r)\n",
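+ "#HY-equivalent notional: scale the bond spread sensitivity by the HY index duration\n",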
+ "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
+ "notional = rmbs_pos['hy_equiv'].sum()\n",
+ "notional = 47633776 #manual override of the computed sum\n",
+ "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = -notional), 'rmbs_bond')\n",
" \n",
- "portf.value_date = date\n",
+ "portf.value_date = shock_date\n",
"portf.mark(interp_method=\"bivariate_linear\")\n",
"portf.reset_pv()\n",
"\n",
"vol_surface = {}\n",
"for trade in portf.swaptions:\n",
" vs = BlackSwaptionVolSurface(trade.index.index_type, trade.index.series, \n",
- " value_date=date, interp_method = \"bivariate_linear\")\n",
+ " value_date=shock_date, interp_method = \"bivariate_linear\")\n",
" vol_surface[trade.index.index_type + trade.index.series] = vs[vs.list(option_type='payer')[-1]]\n",
"vol_shock = [0]\n",
"corr_shock = [0]\n",
- "spread_shock = widen + tighten\n",
- "date_range = [pd.Timestamp(date)]\n",
+ "spread_shock = tighten + [0] + widen\n",
+ "date_range = [pd.Timestamp(shock_date)]\n",
"\n",
- "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
+ "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\", \"hy_equiv\"],\n",
" spread_shock=spread_shock,\n",
" vol_shock=vol_shock,\n",
" corr_shock=corr_shock,\n",
" vol_surface=vol_surface)\n",
"\n",
- "scens.sum(axis=1)"
+ "scens.xs('pnl', level=1, axis=1).sum(axis=1)"
]
},
{
diff --git a/python/notebooks/VaR.ipynb b/python/notebooks/VaR.ipynb
index de2c26a1..ba59d717 100644
--- a/python/notebooks/VaR.ipynb
+++ b/python/notebooks/VaR.ipynb
@@ -16,6 +16,7 @@
"import exploration.VaR as var\n",
"import pandas as pd\n",
"import numpy as np\n",
+ "import globeop_reports as go\n",
"\n",
"conn = dbconn('dawndb')\n",
"dawndb = dbengine('dawndb')\n",
@@ -84,10 +85,10 @@
"outputs": [],
"source": [
"#Import the IM at the FCM account: calculate the IM share of different strategies as a share of VaR\n",
- "filename = date.strftime('%Y%m%d') + \"_OTC_MARGIN.csv\"\n",
- "margin_df = pd.read_csv(\"/home/serenitas/Daily/SG_reports/\" + filename, index_col='System Currency')\n",
- "mortg_hedge_im = mort_hedge_var + mort_hedge_var/(mort_hedge_var + ig_curve_var) * margin_df.loc[('USD', 'SG Settlement Margin')]\n",
- "mortg_hedge_im"
+ "#filename = date.strftime('%Y%m%d') + \"_OTC_MARGIN.csv\"\n",
+ "#margin_df = pd.read_csv(\"/home/serenitas/Daily/SG_reports/\" + filename, index_col='System Currency')\n",
+ "#mortg_hedge_im = mort_hedge_var + mort_hedge_var/(mort_hedge_var + ig_curve_var) * margin_df.loc[('USD', 'SG Settlement Margin')]\n",
+ "#mortg_hedge_im"
]
},
{
@@ -163,7 +164,7 @@
" index_trade_ids = [dealid for dealid, ntl in c if ntl != 0]\n",
" \n",
"portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid in t_trade_ids],\n",
- " t_trade_ids)\n",
+ " ['trn_'+ str(a) for a in t_trade_ids])\n",
"for row in swaption_trades:\n",
" option_delta = CreditIndex(row[1].split()[1], row[1].split()[3][1:], '5yr', position_date)\n",
" option_delta.mark()\n",
@@ -171,9 +172,15 @@
"for index_id in index_trade_ids:\n",
" portf.add_trade(CreditIndex.from_tradeid(index_id), 'index_' + str(index_id))\n",
" \n",
- "#Update manually - positive notional = long risk\n",
- "non_trancheSwap_risk_notional = 49119912 \n",
- "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, notional = -non_trancheSwap_risk_notional), 'bond')\n",
+ "#get bond risks:\n",
+ "rmbs_pos = go.rmbs_pos(position_date)\n",
+ "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
+ " shock_date)\n",
+ "duration, = next(r)\n",
+ "rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
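+ "#represent the bond book as an HY index position with the same spread risk\n",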
+ "portf.add_trade(CreditIndex('HY', on_the_run('HY'), '5yr', value_date = shock_date, \n",
+ " notional = rmbs_pos['hy_equiv'].sum()), 'rmbs_bond')\n",
" \n",
"portf.value_date = shock_date\n",
"portf.mark(interp_method=\"bivariate_linear\")\n",
@@ -195,7 +202,7 @@
" corr_shock=corr_shock,\n",
" vol_surface=vol_surface)\n",
"\n",
- "scens.xs('pnl', level=1).sum(axis=1)"
+ "scens.xs('pnl', level=1, axis=1).sum(axis=1)"
]
},
{
@@ -205,12 +212,20 @@
"outputs": [],
"source": [
"spread_shock = np.arange(-.4, 2.2, .2)\n",
- "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\"],\n",
+ "\n",
+ "scens = run_portfolio_scenarios(portf, date_range, params=[\"pnl\", \"hy_equiv\"],\n",
" spread_shock=spread_shock,\n",
" vol_shock=vol_shock,\n",
" corr_shock=corr_shock,\n",
" vol_surface=vol_surface)\n",
- "scens.sum(axis=1)\n",
+ "results = {}\n",
+ "for x in ['pnl', 'hy_equiv']:\n",
+ " df = scens.xs(x, level=1, axis=1)\n",
+ " for y in ['trn', 'opt', 'index']:\n",
+ " columns = [col for col in df.columns if y in col]\n",
+ " results[(x,y)] = df[columns].sum(axis=1)\n",
+ " \n",
+ "hy_equiv = scens.xs('hy_equiv', level=1, axis=1).sum(axis=1)\n",
"\n",
"#risk_notional = [t.notional * t._index.duration for t in portf.indices]\n",
"#portf.trades[0]._index.duration()"