aboutsummaryrefslogtreecommitdiffstats
path: root/python/exploration/portfolio_var.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/exploration/portfolio_var.py')
-rw-r--r--python/exploration/portfolio_var.py124
1 file changed, 70 insertions, 54 deletions
diff --git a/python/exploration/portfolio_var.py b/python/exploration/portfolio_var.py
index 5b0006f9..7fba2ba2 100644
--- a/python/exploration/portfolio_var.py
+++ b/python/exploration/portfolio_var.py
@@ -1,81 +1,97 @@
from index_data import get_index_quotes, index_returns, _serenitas_engine
from analytics import Swaption, BlackSwaption, Index, VolatilitySurface, Portfolio
from db import dbengine, dbconn
-from pandas.tseries.offsets import BDay
-import numpy as np
+from pandas.tseries.offsets import BDay, BMonthEnd
+from copy import deepcopy
+import numpy as np
import pandas as pd
import math
import datetime
+import exploration.curve_trades as cv
+
dawndb = dbengine('dawndb')
serenitasdb = dbengine('serenitasdb')
-def get_spreads():
- ## look at returns
- index, df, df1, returns = {}, {}, {}, {}
- shock = []
- shock_values = [.95,.05]
-
- for ind in ['HY','IG']:
- index[ind] = Index.from_name(ind, 28, '5yr')
- df[ind] = index_returns(index=ind, series=[23, 24, 25, 26, 27, 28], tenor=['3yr', '5yr', '7yr', '10yr'], per = 22)
- #df1[ind] = get_index_quotes(index=ind, series=[23, 24, 25, 26, 27, 28], tenor=['3yr', '5yr', '7yr', '10yr'])
- returns[ind] = df[ind].price_return.unstack(-1).dropna().groupby(level='date').nth(-1)
- for percent in shock_values: #.95 = ~2 st dev tightening, .05 = widening
- shock.append([ind, percent, returns[ind].quantile(percent)['5yr']])
-
- shock = pd.DataFrame.from_records(shock, columns=['index', 'percent', 'shock'], index=['index', 'percent'])
-
- index['IG'].spread = 62
- index['HY'].price = 107.5
-
-def get_pos():
- #get position by strategy folder
- trade_date = datetime.date.today() - BDay(1)
- df = pd.read_sql_query("SELECT security_id, notional * case when protection = 'Buyer' then -1 else 1 end as notional, " \
- "maturity, folder FROM cds where trade_date < %s and termination_amount is NULL",
- dawndb, params = (trade_date,), parse_dates=['maturity'])
- df = df.groupby(['security_id','maturity','folder']).agg(sum)
- df = df[df.notional!=0].reset_index()
-
- df1 = pd.read_sql_query("SELECT redindexcode as security_id, index, series, indexfactor from index_version where lastdate = 'infinity'", serenitasdb)
- df2 = pd.read_sql_query("SELECT * from index_maturity", serenitasdb, parse_dates=['maturity'])
- df3 = pd.read_sql_query("SELECT index, series, tenor, duration " \
- " FROM index_quotes where date = %s",
- dawndb, params = (trade_date,))
-
- df = df.merge(df1, on='security_id')
- df = df.merge(df2, how='inner', on=['index','maturity', 'series'])
- df = df.merge(df3, on=['index','series','tenor'])
-
- df['ontr_notional'] = df.apply(lambda df: df.notional * df.indexfactor/100 * df.duration/(index[df['index']].DV01/index[df['index']].notional*10000), axis = 1)
- for percent in shock_values:
- df[percent] = df.apply(lambda df: df['ontr_notional'] * shock.loc[df['index'],percent], axis = 1)
-
- df1 = df.groupby(['folder']).agg(sum)[[.95,.05]]
- df1 = df1.min(axis = 1)
-
def on_the_run(index):
    """Return the highest (on-the-run) series number recorded for *index*.

    Looks the value up in the ``index_version`` table via the shared
    serenitas engine.
    """
    result = _serenitas_engine.execute(
        "SELECT max(series) FROM index_version WHERE index=%s", (index,))
    (series,) = result.fetchone()
    return series
-def rel_spread_diff(index='HY', rolling=10):
def rel_spread_diff(report_date=None, index='HY', rolling=10):
    """Rolling relative 5yr spread changes for the last *rolling* series.

    Parameters
    ----------
    report_date : datetime.date, optional
        As-of date.  Defaults to *today*, resolved at call time.  (The
        previous default, ``datetime.date.today()`` in the signature, was
        evaluated once at import, so a long-running process silently kept
        using a stale date.)
    index : str
        Index family name, e.g. ``'HY'`` or ``'IG'``.
    rolling : int
        Number of prior series to include besides the on-the-run one.

    Returns
    -------
    tuple
        ``(per-date last 22B pct-change, current spread, on-the-run duration)``.
    """
    if report_date is None:
        report_date = datetime.date.today()
    otr = on_the_run(index)
    ## look at spreads
    df = get_index_quotes(index, list(range(otr - rolling, otr + 1)),
                          tenor=['3yr', '5yr', '7yr', '10yr'], years=5)
    # On-the-run 5yr duration as of report_date (take the last match).
    duration = df.duration.xs((report_date, '5yr', otr),
                              level=['date', 'tenor', 'series'])[-1]
    df = df.xs('5yr', level='tenor')['closespread'].groupby(['date', 'series']).last()
    df = df.loc['2013-01-15':report_date]
    curr_spread = df.iloc[-1]
    # 22 business days ~ one month of percentage change per series.
    df = df.unstack(-1).pct_change(freq='22B').stack()
    return df.groupby('date').last(), curr_spread, duration
-for x, y in stress.iteritems():
- portf.ref =
def get_pos(report_date):
    """Fetch CDS marks for every strategy book as one DataFrame.

    Runs ``list_cds_marks(report_date, strategy)`` against ``dawndb`` for
    each strategy folder and concatenates the results, keyed by a
    ``strategy`` level that is then reset into a column.
    """
    #report_date = datetime.date(2017,10,31)
    strategies = ["HEDGE_CSO", "HEDGE_MBS", "HEDGE_CLO", "HEDGE_MAC",
                  "SER_IGINX", "SER_HYINX", "SER_IGCURVE", "SER_ITRXCURVE",
                  "IGOPTDEL", "HYOPTDEL"]
    marks = {
        strat: pd.read_sql_query("SELECT * from list_cds_marks(%s, %s)",
                                 dawndb, params=(report_date, strat))
        for strat in strategies
    }
    return pd.concat(marks, names=['strategy']).reset_index()
+
def cleared_cds_margins(report_date=None):
    """Delta capital allocation per strategy from historical spread shocks.

    For each index family, shocks each position's on-the-run-equivalent
    notional by the 95th/5th percentile of historical relative spread
    moves and returns the per-strategy worst case.

    Parameters
    ----------
    report_date : datetime.date, optional
        As-of date.  Defaults to *today*, resolved at call time.  (The
        previous signature default ``datetime.date.today()`` was evaluated
        once at import, freezing the date for long-lived processes.)

    Returns
    -------
    pandas.DataFrame
        Per-strategy sums with a ``total`` column = max(|widen|, |tighten|).
    """
    if report_date is None:
        report_date = datetime.date.today()

    df = get_pos(report_date)

    #Cap Allocation for Deltas
    percentile = .95  #monthly 90%tile case...
    shocks, widen, tighten, onTR_dur, onTR_spread = {}, {}, {}, {}, {}
    for ind in ['IG', 'HY', 'EU']:
        shocks[ind], onTR_spread[ind], onTR_dur[ind] = rel_spread_diff(report_date, index=ind)
        widen[ind] = shocks[ind].quantile(percentile)
        tighten[ind] = shocks[ind].quantile(1 - percentile)

    # NOTE: lambda parameters renamed from `df` to `row` — the originals
    # shadowed the outer DataFrame, which was confusing to read.
    df['onTR_notional'] = df.apply(
        lambda row: row.notional * row.factor * row.duration / onTR_dur[row.p_index],
        axis=1)
    df['widen'] = df.apply(
        lambda row: row.onTR_notional * onTR_spread[row.p_index]
                    * onTR_dur[row.p_index] * widen[row.p_index] / 10000,
        axis=1)
    df['tighten'] = df.apply(
        lambda row: row.onTR_notional * onTR_spread[row.p_index]
                    * onTR_dur[row.p_index] * tighten[row.p_index] / 10000,
        axis=1)
    delta_alloc = df.groupby('strategy').sum()
    # Worst case of the two directional shocks per strategy.
    delta_alloc['total'] = delta_alloc.apply(
        lambda row: max(abs(row.widen), abs(row.tighten)), axis=1)
    return delta_alloc
+
def index_curve_margins(report_date=None):
    """Margin estimate for curve trades under flattening/steepening stress.

    Rebuilds the curve books (IG/iTraxx curve strategies) as Index trades,
    then shocks 3Y and 10Y legs by the 10th/90th percentile of historical
    curve-spread moves and returns the worst PV loss (as a positive margin
    number via ``-min``).

    Parameters
    ----------
    report_date : datetime.date, optional
        As-of date.  Defaults to *today*, resolved at call time.  (The
        previous signature default ``datetime.date.today()`` was evaluated
        once at import, freezing the date for long-lived processes.)
    """
    if report_date is None:
        report_date = datetime.date.today()

    #Curve - whole curve flattening and steepening
    df = get_pos(report_date)
    diff = cv.curve_spread_diff()
    diff = diff.pct_change(freq='22B').dropna() * diff.iloc[-1]

    ctrades = df[df.strategy.isin(['SER_IGCURVE', 'SER_ITRXCURVE'])]
    trades = []
    for _, t in ctrades.iterrows():  # row index unused
        # abs(): original wrote `t.notional * (-1 if t.notional < 0 else 1)`.
        trade = Index.from_name(t.p_index, t.p_series, t.tenor, report_date,
                                abs(t.notional))
        trade.spread = t.closespread
        trade.direction = 'Seller' if t.notional > 0 else 'Buyer'
        trades.append(trade)
    portf = Portfolio(trades)

    #uses on-the-run to apply to any-run but let's start there
    pv_diff = []
    for stress in [diff.quantile(.1), diff.quantile(.9)]:
        portf1 = deepcopy(portf)
        for trade in portf1.indices:
            # Tenor is the 5th token of the trade name — TODO confirm format.
            tenor = trade.name.split()[4]
            if tenor == '3Y':
                trade.spread -= stress.loc['3-5']
            elif tenor == '10Y':
                trade.spread += stress.loc['5-10']
        pv_diff.append(portf1.pv - portf.pv)
    return -min(pv_diff)