Diffstat (limited to 'python')
-rw-r--r--  python/analytics/index.py            3
-rw-r--r--  python/analytics/option.py           2
-rw-r--r--  python/exploration/option_trades.py  91
-rw-r--r--  python/mark_backtest_underpar.py     1
-rw-r--r--  python/option_trades_et.py           95
5 files changed, 126 insertions, 66 deletions
diff --git a/python/analytics/index.py b/python/analytics/index.py
index 8137f63f..3628721f 100644
--- a/python/analytics/index.py
+++ b/python/analytics/index.py
@@ -95,6 +95,7 @@ class Index(object):
self._step_in_date, self._value_date,
[self.end_date], np.array([self._spread]), np.zeros(1),
np.array([self.recovery]))
+
self._risky_annuity = self._fee_leg.pv(self.trade_date, self._step_in_date,
self._value_date, self._yc,
self._sc, False)
@@ -279,6 +280,8 @@ class Index(object):
accrued_str = "Accrued ({} Days)".format(self.days_accrued)
else:
accrued_str = "Accrued ({} Day)".format(self.days_accrued)
+ if not self.spread:
+ raise ValueError("Market spread is missing!")
s = ["{:<20}\t{:>15}".format("CDS Index", colored(self.name, attrs = ['bold'])),
"",
"{:<20}\t{:>15}".format("Trade Date", ('{:%m/%d/%y}'.
diff --git a/python/analytics/option.py b/python/analytics/option.py
index e2177d6d..4a61e90f 100644
--- a/python/analytics/option.py
+++ b/python/analytics/option.py
@@ -148,7 +148,7 @@ class Swaption(ForwardIndex):
args = (self.forward_pv, self.exercise_date, self.exercise_date_settle,
self.index, self._forward_yc, tilt, self._w)
eta = 1.05
- a = self.index.spread
+ a = self.index.spread * 0.99
b = a * eta
while True:
if calib(*((b,) + args)) > 0:
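Seeding the lower bracket at self.index.spread * 0.99 keeps the initial interval [a, b] (with b = a * eta) non-degenerate before the sign search. A sketch of the surrounding bracket-expansion idiom, assuming calib is continuous and crosses zero from below in its first argument; brentq then pins the root:

from scipy.optimize import brentq

def bracket_and_solve(calib, spread, args=(), eta=1.05):
    # Widen the upper bracket geometrically until calib changes sign,
    # then hand the interval to a guaranteed-convergence root finder.
    a = spread * 0.99
    b = a * eta
    while calib(b, *args) <= 0:
        a, b = b, b * eta
    return brentq(calib, a, b, args=args)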
diff --git a/python/exploration/option_trades.py b/python/exploration/option_trades.py
index 611fdc83..f9685fbb 100644
--- a/python/exploration/option_trades.py
+++ b/python/exploration/option_trades.py
@@ -8,11 +8,12 @@ from pandas.tseries.offsets import BDay
from arch import arch_model
from db import dbengine, dbconn
from scipy.interpolate import interp1d
-from analytics import Index
+from analytics import Index, ForwardIndex
from index_data import index_returns
+
serenitasdb = dbengine('serenitasdb')
-def realized_vol(index, series, tenor='5yr', date=None, years=None):
+def realized_vol(index, series, tenor='5yr', date=None, years=None, return_type='spread'):
"""computes the realized spread volatility"""
if date is None:
if years is None:
@@ -20,7 +21,7 @@ def realized_vol(index, series, tenor='5yr', date=None, years=None):
date = (pd.Timestamp.now() - pd.DateOffset(years=years)).date()
returns = index_returns(index=index, series=series, tenor=tenor, years=None)
# GARCH(1,1) volatility process with constant mean
- am = arch_model(returns)
+ am = arch_model(returns[return_type+'_return'])
res = am.fit(update_freq=0, disp='off')
return (res.conditional_volatility * math.sqrt(252), res)
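realized_vol now fits the GARCH model to a single selected column (return_type + '_return'), so the series handed to arch_model is one-dimensional. A self-contained sketch of the fit and annualization on synthetic daily returns (the real code feeds index_returns output; the column names are whatever index_returns produces):

import math
import numpy as np
from arch import arch_model

returns = np.random.default_rng(0).normal(0.0, 1.0, 1000)  # synthetic daily % returns
am = arch_model(returns)                 # constant mean, GARCH(1,1) by default
res = am.fit(update_freq=0, disp='off')
annualized_vol = res.conditional_volatility * math.sqrt(252)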
@@ -37,45 +38,67 @@ def lr_var(res):
    var = res.params[names[0]] / (1 - res.params[names[1:]].sum())  # omega / (1 - alpha - beta)
return math.sqrt(var * 252)
-def atm_vol_fun(v, ref_is_price=False, moneyness=0.2):
+def atm_vol_fun(v, moneyness=0.2, index=None):
f = interp1d(v.strike.values, v.vol.values, fill_value='extrapolate')
- atm_val = v['fwdspread'].iat[0]
- otm_val = atm_val * (1 + moneyness) ## doesn't make sense for HY
+ if index is None:
+ atm_val = v['fwdspread'].iat[0]
+ otm_val = atm_val * (1 + moneyness)
+ else:
+ if 'HY' in index.name:
+ index.price = v['ref'].iat[0]
+ atm_val = ForwardIndex(index, v.index.get_level_values('expiry')[0], ref_is_price=True).forward_spread
+ otm_val = atm_val * (1 + moneyness)
+ index.spread = atm_val
+ atm_val = index.price
+ index.spread = otm_val
+ otm_val = index.price
+ else:
+ index.spread = v['ref'].iat[0]
+ atm_val = ForwardIndex(index, v.index.get_level_values('expiry')[0], ref_is_price=False).forward_spread
+ otm_val = atm_val * (1 + moneyness)
return pd.Series(f(np.array([atm_val, otm_val])), index = ['atm_vol', 'otm_vol'])
-def atm_vol(index, series, moneyness=0.2):
- df = pd.read_sql_query('SELECT quotedate, expiry, strike, vol from swaption_quotes ' \
- 'WHERE index = %s and series = %s',
- serenitasdb, index_col=['quotedate', 'expiry'],
- params = (index.upper(), series))
- index_data = pd.read_sql_query(
- 'SELECT quotedate, expiry, fwdspread from swaption_ref_quotes ' \
- 'WHERE index= %s and series = %s',
- serenitasdb, index_col = ['quotedate', 'expiry'],
- params = (index.upper(), series))
-
- df = df.join(index_data)
- df = df.groupby(level=['quotedate', 'expiry']).filter(lambda x: len(x)>=2)
- df = df.groupby(level=['quotedate', 'expiry']).apply(atm_vol_fun, index=="HY", moneyness)
- df = df.reset_index(level=-1) #move expiry back to the column
- return df
-
-def atm_vol_date(index, date):
- df = pd.read_sql_query('SELECT quotedate, series, expiry, strike, vol ' \
+def atm_vol(index, date, series = None, moneyness=0.2):
+ if series is None:
+ df = pd.read_sql_query('SELECT quotedate, series, expiry, strike, vol ' \
'FROM swaption_quotes ' \
'WHERE index = %s and quotedate >= %s',
serenitasdb,
index_col=['quotedate', 'expiry', 'series'],
params=(index.upper(), date), parse_dates=['quotedate'])
- index_data = pd.read_sql_query(
- 'SELECT quotedate, expiry, series, fwdspread FROM swaption_ref_quotes ' \
- 'WHERE index= %s and quotedate >= %s',
- serenitasdb, index_col=['quotedate', 'expiry', 'series'],
- params = (index.upper(), date), parse_dates=['quotedate'])
+ index_data = pd.read_sql_query(
+ 'SELECT quotedate, expiry, series, ref, fwdspread FROM swaption_ref_quotes ' \
+ 'WHERE index= %s and quotedate >= %s',
+ serenitasdb, index_col=['quotedate', 'expiry', 'series'],
+ params=(index.upper(), date), parse_dates=['quotedate'])
+ else:
+ df = pd.read_sql_query('SELECT quotedate, series, expiry, strike, vol from swaption_quotes ' \
+ 'WHERE index = %s and series = %s and quotedate >= %s',
+ serenitasdb, index_col=['quotedate', 'expiry', 'series'],
+ params = (index.upper(), series, date))
+ index_data = pd.read_sql_query(
+ 'SELECT quotedate, series, expiry, ref, fwdspread from swaption_ref_quotes ' \
+ 'WHERE index= %s and series = %s and quotedate >= %s',
+ serenitasdb, index_col = ['quotedate', 'expiry', 'series'],
+ params = (index.upper(), series, date))
+
df = df.join(index_data)
- df = df.groupby(df.index).filter(lambda x: len(x)>=2)
- df = df.groupby(level=['quotedate', 'expiry', 'series']).apply(atm_vol_fun)
- df = df.reset_index(level=['expiry', 'series']) #move expiry and series back to the columns
+ df = df.groupby(df.index).filter(lambda x: len(x) >= 2)
+
+ df1 = atm_vol_calc(df, index)
+ return df1
+
+def atm_vol_calc(df, index):
+ g_temp = {}
+ for s, g1 in df.groupby(level='series'):
+ index_obj = Index.from_name(index, s, '5yr')
+ for date, g2 in g1.groupby(g1.index.get_level_values(0)):
+ index_obj.trade_date = date.date()
+ for expiry, g3 in g2.groupby(g2.index.get_level_values(1)):
+ g_temp[(date, expiry, s)] = atm_vol_fun(g3, index=index_obj)
+ df = pd.concat(g_temp, names=['quotedate', 'expiry', 'series'])
+ df = df.unstack(-1)
+ df = df.reset_index(level=['expiry', 'series'])
return df
def rolling_vol(df, col='atm_vol', term=[3]):
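atm_vol_fun reads the ATM and OTM vols off a linear interpolation of the quoted (strike, vol) pairs; the HY branch additionally round-trips the forward spread through the index price because HY strikes are quoted in price. A minimal sketch of the spread-quoted branch on synthetic quotes:

import numpy as np
import pandas as pd
from scipy.interpolate import interp1d

strikes = np.array([55.0, 60.0, 65.0, 70.0, 75.0])   # synthetic strike quotes
vols = np.array([0.48, 0.45, 0.44, 0.46, 0.49])
f = interp1d(strikes, vols, fill_value='extrapolate')

atm_val = 62.0                    # forward spread
otm_val = atm_val * (1 + 0.2)     # 20% out of the money
print(pd.Series(f(np.array([atm_val, otm_val])), index=['atm_vol', 'otm_vol']))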
@@ -93,7 +116,7 @@ def rolling_vol(df, col='atm_vol', term=[3]):
return df.dropna()
def vol_var(percentile=0.99, index='IG'):
- df = atm_vol_date("IG", datetime.date(2014, 6, 11))
+ df = atm_vol(index, datetime.date(2014, 6, 11))
df = rolling_vol(df, term=[1,2,3])
df = df.sort_index()
df = df.groupby(df.index.date).nth(-1)
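With atm_vol_date folded into atm_vol, a single entry point serves both the all-series and single-series queries. Hypothetical usage (dates and series numbers are illustrative):

import datetime

df_all = atm_vol('IG', datetime.date(2014, 6, 11))             # every series quoted since the date
df_s27 = atm_vol('IG', datetime.date(2017, 1, 2), series=27)   # one series only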
diff --git a/python/mark_backtest_underpar.py b/python/mark_backtest_underpar.py
index c6128b13..b17cc26f 100644
--- a/python/mark_backtest_underpar.py
+++ b/python/mark_backtest_underpar.py
@@ -7,6 +7,7 @@ import statsmodels.api as sm
from statsmodels.formula.api import gls
import seaborn as sb
+
df = pd.read_sql_table('external_marks_mapped', dbengine('dawndb'),
parse_dates=['date'])
df = df[df.source.notnull()]
diff --git a/python/option_trades_et.py b/python/option_trades_et.py
index 9f27e4b3..8b64e7b7 100644
--- a/python/option_trades_et.py
+++ b/python/option_trades_et.py
@@ -59,27 +59,21 @@ def beta_calc():
ltvar = lr_var(resHY)
graphit = compute_allocation(all_tenors)
-def build_swaption(index = 'IG', series = 27, expiry = datetime.date(2017, 4, 19), strike = 65, ref = 62, trade_date = datetime.date(2017, 2, 23), t_range = None):
+def build_swaption(index, series, expiry, strike, ref, trade_date, t_range= None, spread_range = None):
index_obj = Index.from_name(index, series, '5yr', trade_date)
swap_obj = opt.Swaption(index_obj, expiry, strike, option_type="payer")
swap_obj.notional = 100000000
if t_range is None:
t_range = pd.bdate_range(trade_date, expiry- BDay(), freq = '5B')
+ if spread_range is None:
+ spread_range = pd.Series(np.arange(ref - 10, ref +19, 5))
+
vol_range = pd.Series(np.arange(25, 60, 5)) #not inclusive of end point
- spread_range = pd.Series(np.arange(ref - 10, ref +19, 5))
df = pd.DataFrame(index = pd.MultiIndex.from_product([t_range, spread_range, vol_range], names = ['date', 'spread', 'vol']), columns = ['pv'])
df = df.reset_index()
- def manual_index_update(index, date):
- index._yc = index._yc.expected_forward_curve(date)
- index._trade_date = date
- index._step_in_date = index.trade_date + datetime.timedelta(days=1)
- index._accrued = index._fee_leg.accrued(index._step_in_date)
- index._value_date = (pd.Timestamp(index._trade_date) + 3* BDay()).date()
- index._update()
-
def aux(row, index, swap):
index.spread = row.spread
manual_index_update(index, row.date.date())
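build_swaption revalues the payer swaption over the cartesian product of revaluation dates, spread scenarios and vol scenarios. The grid construction in isolation, with the parameter values full_analysis passes below:

import datetime
import numpy as np
import pandas as pd
from pandas.tseries.offsets import BDay

trade_date = datetime.date(2017, 2, 23)
expiry = datetime.date(2017, 4, 19)
ref = 62

t_range = pd.bdate_range(trade_date, expiry - BDay(), freq='5B')   # weekly revaluation dates
spread_range = pd.Series(np.arange(ref - 10, ref + 19, 5))         # 52, 57, ..., 77
vol_range = pd.Series(np.arange(25, 60, 5))                        # end point excluded

grid = pd.DataFrame(
    index=pd.MultiIndex.from_product([t_range, spread_range, vol_range],
                                     names=['date', 'spread', 'vol']),
    columns=['pv']).reset_index()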
@@ -91,7 +85,7 @@ def build_swaption(index = 'IG', series = 27, expiry = datetime.date(2017, 4, 19
#calculate mapped vol
df['moneyness'] = (strike- df.spread)/df.spread
- df['days_to_expiry'] = (expiry - df.date) / np.timedelta64(1,'D')
+ df['days_to_expiry'] = (expiry - df.date).dt.days
vol_surface = build_vol_surface_functions(trade_date, index, series)
df['mapped_vol'] = df.apply(vol_from_surface, axis = 1, args=(vol_surface[0], vol_surface[1]))
df['mapping_shift'] = pd.to_numeric(df.vol/100 - df.mapped_vol, errors = 'ignore')
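The switch from dividing by np.timedelta64(1, 'D') to the .dt.days accessor returns integer day counts instead of floats. A quick check of the difference:

import pandas as pd

td = pd.Series(pd.to_datetime(['2017-04-19'])) - pd.Timestamp('2017-02-23')
print(td.dt.days)                 # 55 (int64)
print(td / pd.Timedelta(days=1))  # 55.0 (float64)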
@@ -99,12 +93,37 @@ def build_swaption(index = 'IG', series = 27, expiry = datetime.date(2017, 4, 19
return df
+def calc_delta_pnl(index, series, ref, trade_date, notional, t_range, spread_range):
+
+ index_obj = Index.from_name(index, series, '5yr', trade_date)
+ index_obj.spread = ref
+ index_obj.notional = notional
+ startingpv = -index_obj.clean_pv
+
+ index_pv = {}
+ for date in t_range:
+ for spread in spread_range:
+ index_obj.spread = spread
+ manual_index_update(index_obj, date)
+ index_pv[(date, spread)] = -index_obj.clean_pv + notional * (date.date() - trade_date).days / 360 * index_obj.fixed_rate / 10000
+
+ df = pd.DataFrame.from_dict(index_pv, orient = 'index').reset_index()
+ df['date'] = df['index'].apply(lambda x: x[0])
+ df['spread'] = df['index'].apply(lambda x: x[1])
+ del df['index']
+ df = df.set_index(['date','spread']).sort_index()
+ df = (df - startingpv).unstack(-1)
+ df.columns = df.columns.droplevel()
+
+ return df
+
def find_mapped_pv(bought, sold, date):
sold = sold.xs(date).reset_index()
bought = bought.xs(date).reset_index()
- #Bivariate B-Spline, instead of interp2d. Interp2d doesn't behave well....
+ #Bivariate B-Spline instead of interp2d: interp2d is poorly behaved on this data and emits many warnings
x = bought.spread.unique()
y = sorted(bought.mapping_shift.unique())
grid = np.meshgrid(x,y)
@@ -112,7 +131,6 @@ def find_mapped_pv(bought, sold, date):
f_sold = SmoothBivariateSpline(sold.spread, sold.mapping_shift, sold.pv, kx = 4, ky = 4)
intp_buy = f_buy.ev(grid[0],grid[1])
intp_sold = f_sold.ev(grid[0],grid[1])
- #import pdb; pdb.set_trace()
df = pd.DataFrame(intp_buy, index = grid[1][0:,0], columns = grid[0][0])
df1 = pd.DataFrame(intp_sold, index = grid[1][0:,0], columns = grid[0][0])
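find_mapped_pv fits a smoothing bivariate spline to the scattered (spread, vol shift, pv) scenario points, then evaluates both surfaces on a common regular grid. The same pattern on synthetic data (kx=ky=4 surfaces need at least 25 well-spread points):

import numpy as np
from scipy.interpolate import SmoothBivariateSpline

rng = np.random.default_rng(0)
spread = rng.uniform(52, 80, 60)      # scattered scenario coordinates
shift = rng.uniform(-0.05, 0.10, 60)
pv = 1e4 * (spread - 62) + 2e5 * shift + rng.normal(0, 50, 60)

f = SmoothBivariateSpline(spread, shift, pv, kx=4, ky=4)
gx, gy = np.meshgrid(np.linspace(52, 80, 7), np.linspace(-0.05, 0.10, 7))
surface = f.ev(gx, gy)                # interpolated PVs on the regular grid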
@@ -151,8 +169,6 @@ def plot_color_map(df, val_date):
#rows are spread, columns are volatility surface shift
fig, ax = plt.subplots()
- #import pdb; pdb.set_trace()
-
#Different ways to do a colormap: imshow and pcolormesh. using imshow here
midpoint = 1 - df.max().max()/(df.max().max() + abs(df.min().min()))
shifted_cmap = shiftedColorMap(cm.RdYlGn, midpoint=midpoint, name='shifted')
@@ -166,8 +182,6 @@ def plot_color_map(df, val_date):
fig.colorbar(chart, shrink = .8)
- #import pdb; pdb.set_trace()
-
fig.savefig("/home/serenitas/edwin/PythonGraphs/payer_swap_" + str(val_date.date()) + ".png")
def build_vol_surface_functions(date = datetime.date(2017, 2, 23), index = 'IG', series = '27'):
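plot_color_map recentres the diverging RdYlGn colormap so the red/green break sits at zero PNL: midpoint = 1 - max/(max + |min|), i.e. |min|/(max + |min|), the position of zero within the [min, max] range. Worked with illustrative numbers:

pnl_max, pnl_min = 300_000.0, -100_000.0
midpoint = 1 - pnl_max / (pnl_max + abs(pnl_min))   # 0.25: zero PNL maps 25% up the colorbar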
@@ -187,7 +201,7 @@ def build_vol_surface_functions(date = datetime.date(2017, 2, 23), index = 'IG',
df1 = df1.groupby(df1.index).filter(lambda x: len(x) >= 2)
df1 = df1.reset_index()
#once the dates are in the columns you need to use .dt to access date functions
- df1['days_to_expiry'] = (df1.expiry - df1.quotedate.dt.normalize().dt.tz_localize(None)) / np.timedelta64(1,'D')
+ df1['days_to_expiry'] = (df1.expiry - df1.quotedate.dt.normalize().dt.tz_localize(None)).dt.days
df1['moneyness'] = (df1.strike - df1.ref)/df1.ref
df1 = df1.groupby(['days_to_expiry','moneyness']).nth(-1).vol
df1 = df1.reset_index()
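build_vol_surface_functions keys the surface on (days_to_expiry, moneyness) and keeps only the most recent quote per cell. A small sketch of that de-duplication (using .last(); the hunk's groupby(...).nth(-1) is the positional equivalent):

import pandas as pd

quotes = pd.DataFrame({
    'days_to_expiry': [30, 30, 30, 60],
    'moneyness': [0.05, 0.05, 0.10, 0.05],   # (strike - ref) / ref
    'vol': [0.44, 0.45, 0.47, 0.46],
})
surface = quotes.groupby(['days_to_expiry', 'moneyness']).vol.last().reset_index()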
@@ -201,6 +215,21 @@ def vol_from_surface(row, f, g):
vol = g(row.days_to_expiry, row.moneyness)
return vol
+def calc_and_plot(bought, sold, traded_price, week, lowerbound, upperbound, deltaPNL=None):
+
+ if week > len(bought.index.get_level_values(0).unique()):
+ week = len(bought.index.get_level_values(0).unique())-1
+
+ date = bought.index.get_level_values(0).unique()[week]
+
+ PNL = find_mapped_pv(bought, sold, date) - traded_price
+ if deltaPNL is not None:
+     PNL += deltaPNL.loc[date.date()]
+
+ PNL = PNL[lowerbound:upperbound].sort_index(ascending = False)
+
+ plot_color_map(PNL, date)
+
+ return PNL
+
def full_analysis():
index = 'IG'
series = 27
@@ -210,37 +239,41 @@ def full_analysis():
sell_strike = 72
ref = 62
trade_date = datetime.date(2017, 2, 23)
+ delta_notional = 13000000
t_range = pd.bdate_range(trade_date, buy_expiry- BDay(), freq = '5B')
+ spread_range = pd.Series(np.arange(ref - 10, ref +19, 5))
- bought = build_swaption(index, series, buy_expiry, buy_strike, ref, trade_date, t_range)
- sold = build_swaption(index, series, sell_expiry, sell_strike, ref, trade_date, t_range)
+ bought = build_swaption(index, series, buy_expiry, buy_strike, ref, trade_date, t_range, spread_range)
+ sold = build_swaption(index, series, sell_expiry, sell_strike, ref, trade_date, t_range, spread_range)
+
+ delta_PNL = calc_delta_pnl(index, series, ref, trade_date, delta_notional, t_range, spread_range)
#Calc PNL and Plot:
traded_price = 5000
lowerbound = -.05 #parallel shift down 5% vol
upperbound = .1 #parallel shift up 10% vol
- week = -3 #negative to count backwards
+ week = 1 #index into the revaluation dates; negative values count backwards
- PNL = calc_and_plot(bought, sold, traded_price, week, lowerbound, upperbound)
+ PNL = calc_and_plot(bought, sold, traded_price, week, lowerbound, upperbound, delta_PNL)
+
+ return (bought, sold, PNL, delta_PNL)
- return (bought, sold, PNL)
-def calc_and_plot(bought, sold, traded_price, week, lowerbound, upperbound):
- if week > len(bought.index.get_level_values(0).unique()):
- week = len(bought.index.get_level_values(0).unique())-1
- date = bought.index.get_level_values(0).unique()[week]
- PNL = find_mapped_pv(bought, sold, date) - traded_price
- PNL = PNL[lowerbound:upperbound].sort_index(ascending = False)
- plot_color_map(PNL, date)
- return PNL
+def manual_index_update(index, date): #index is an analytics Index object
+ index._yc = index._yc.expected_forward_curve(date)
+ index._trade_date = date
+ index._step_in_date = index.trade_date + datetime.timedelta(days=1)
+ index._accrued = index._fee_leg.accrued(index._step_in_date)
+ index._value_date = (pd.Timestamp(index._trade_date) + 3* BDay()).date()
+ index._update()
import numpy as np
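The carry term calc_delta_pnl adds to the hedge PNL is act/360 fixed-coupon accrual: notional * days/360 * fixed_rate/10000. A worked check, assuming a 100bp coupon for illustration (the real value comes from index_obj.fixed_rate):

notional = 13_000_000   # delta_notional in full_analysis
fixed_rate = 100        # bp, assumed for illustration
days = 35               # days since trade_date
carry = notional * days / 360 * fixed_rate / 10000
print(carry)            # 12638.89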