Diffstat (limited to 'python/option_trades_et.py')
 python/option_trades_et.py | 303
 1 file changed, 303 insertions, 0 deletions
diff --git a/python/option_trades_et.py b/python/option_trades_et.py
new file mode 100644
index 00000000..9f27e4b3
--- /dev/null
+++ b/python/option_trades_et.py
@@ -0,0 +1,303 @@
+import datetime
+import math
+
+import analytics.option as opt
+import matplotlib.colors as colors
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+
+from matplotlib import cm
+# the wildcard imports provide BDay, the scipy interpolators (SmoothBivariateSpline,
+# interp2d, *NDInterpolator) and the exploration helpers (atm_vol, rolling_vol, ...)
+from exploration.option_trades import *
+from pandas.tseries.offsets import *
+from analytics import Index, ForwardIndex
+from db import dbengine, dbconn
+from scipy.interpolate import *
+
+serenitasdb = dbengine('serenitasdb')
+
+def get_dfs(index="IG"):
+    # rolling ATM and OTM vol frames, one column per term bucket (1m-6m)
+    df0 = atm_vol(index, datetime.date(2014, 6, 11))
+    df = rolling_vol(df0, 'atm_vol', term=[1, 2, 3, 4, 5, 6])
+    df1 = rolling_vol(df0, 'otm_vol', term=[1, 2, 3, 4, 5, 6])
+    return (df, df1)
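+
+# Rough usage sketch (assumes the exploration.option_trades helpers and the
+# serenitas DB are available; column names follow the term buckets used below):
+#   atm, otm = get_dfs("IG")
+#   atm[['1m', '3m', '5m']].plot()   # rolling ATM vols for the 1m/3m/5m buckets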
+
+def calendar_spread(moneyness='atm_vol'):
+    # pick the ATM or OTM rolling-vol frame returned by get_dfs()
+    dfs = get_dfs()
+    df = dfs[0] if moneyness == 'atm_vol' else dfs[1]
+    df['cal 3m-1m'] = df['3m'] - df['1m']
+    df['cal 5m-3m'] = df['5m'] - df['3m']
+    df = df.sort_index()
+    df = df.groupby(df.index.date).nth(-1)
+    df[['cal 3m-1m', 'cal 5m-3m']].plot()
+    # averages over the last 100, 100-200 and 200-300 observations
+    avg = pd.DataFrame([df[-100:].mean(), df[-200:-100].mean(), df[-300:-200].mean()])
+    return (df[-1:], avg)
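+
+# Reading the spread monitors (a sketch; `latest` is the most recent row, `avg`
+# holds trailing 100-observation averages, most recent block first):
+#   latest, avg = calendar_spread()
+#   latest_otm, avg_otm = calendar_spread(moneyness='otm_vol')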
+
+def put_spread(index="IG"):
+    dfs = get_dfs(index)
+    df = pd.concat([dfs[0], dfs[1]], axis=1, keys=['atm', 'otm'])
+    # skew monitor: OTM vol minus ATM vol for each term bucket
+    steepness = df['otm'] - df['atm']
+    steepness.plot()
+    # averages over the last 100, 100-200 and 200-300 observations
+    avg = pd.DataFrame([steepness[-100:].mean(), steepness[-200:-100].mean(), steepness[-300:-200].mean()])
+    return (steepness[-1:], avg)
+
+def swaption_analysis():
+    cal = calendar_spread()
+    cal_otm = calendar_spread(moneyness="otm_vol")
+    vol_df = atm_vol('IG', 27).groupby(level='quotedate').last().dropna()
+    return (cal, cal_otm, vol_df)
+
+def beta_calc():
+    # GARCH fits on index price and spread return series; lr_var gives the
+    # long-run variance for each fit
+    am = arch_model(10000 * index_price_returns(index='IG'))
+    res = am.fit(update_freq=0, disp='off')
+
+    amIG = arch_model(100 * index_returns())
+    resIG = amIG.fit(update_freq=0, disp='off')
+    ltvar_ig = lr_var(resIG)
+
+    amHY = arch_model(1000 * index_returns(index='HY'))
+    resHY = amHY.fit(update_freq=0, disp='off')
+    ltvar_hy = lr_var(resHY)
+    graphit = compute_allocation(all_tenors)
+    return (res, ltvar_ig, ltvar_hy, graphit)
+
+def build_swaption(index='IG', series=27, expiry=datetime.date(2017, 4, 19), strike=65,
+                   ref=62, trade_date=datetime.date(2017, 2, 23), t_range=None):
+    index_obj = Index.from_name(index, series, '5yr', trade_date)
+    swap_obj = opt.Swaption(index_obj, expiry, strike, option_type="payer")
+    swap_obj.notional = 100000000
+
+    # scenario grid: weekly valuation dates, spreads around ref, flat vol levels
+    if t_range is None:
+        t_range = pd.bdate_range(trade_date, expiry - BDay(), freq='5B')
+    vol_range = pd.Series(np.arange(25, 60, 5))  # not inclusive of end point
+    spread_range = pd.Series(np.arange(ref - 10, ref + 19, 5))
+
+    df = pd.DataFrame(index=pd.MultiIndex.from_product([t_range, spread_range, vol_range],
+                                                        names=['date', 'spread', 'vol']),
+                      columns=['pv'])
+    df = df.reset_index()
+
+    def manual_index_update(index, date):
+        # manually roll the Index to a new valuation date: shift the forward curve
+        # and the trade/step-in/value dates, recompute accrued, then reprice
+        index._yc = index._yc.expected_forward_curve(date)
+        index._trade_date = date
+        index._step_in_date = index.trade_date + datetime.timedelta(days=1)
+        index._accrued = index._fee_leg.accrued(index._step_in_date)
+        index._value_date = (pd.Timestamp(index._trade_date) + 3 * BDay()).date()
+        index._update()
+
+    def aux(row, index, swap):
+        # reprice the swaption for one (date, spread, vol) scenario
+        index.spread = row.spread
+        manual_index_update(index, row.date.date())
+        swap.sigma = row.vol / 100
+        swap._update()
+        return swap.pv
+
+    df['pv'] = df.apply(aux, axis=1, args=(index_obj, swap_obj))
+
+    # map each scenario vol onto the traded surface: mapping_shift expresses the
+    # scenario vol as a parallel shift relative to the interpolated surface vol
+    df['moneyness'] = (strike - df.spread) / df.spread
+    df['days_to_expiry'] = (expiry - df.date) / np.timedelta64(1, 'D')
+    vol_surface = build_vol_surface_functions(trade_date, index, series)
+    df['mapped_vol'] = df.apply(vol_from_surface, axis=1, args=(vol_surface[0], vol_surface[1]))
+    df['mapping_shift'] = pd.to_numeric(df.vol / 100 - df.mapped_vol, errors='ignore')
+    df = df.set_index(['date', 'spread', 'vol'])
+
+    return df
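+
+# Usage sketch (the defaults mirror the Feb-2017 IG S27 payer analysed in
+# full_analysis below; pricing requires the analytics Index/Swaption objects
+# and the serenitas DB):
+#   bought = build_swaption('IG', 27, datetime.date(2017, 4, 19), strike=65,
+#                           ref=62, trade_date=datetime.date(2017, 2, 23))
+#   bought.xs(bought.index.get_level_values(0)[0]).head()  # PV grid, first date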
+
+def find_mapped_pv(bought, sold, date):
+
+    sold = sold.xs(date).reset_index()
+    bought = bought.xs(date).reset_index()
+
+    # Bivariate B-spline instead of interp2d (interp2d doesn't behave well here):
+    # fit PV as a function of (spread, mapping_shift) for each leg, then evaluate
+    # both on a common grid so the legs can be netted
+    x = bought.spread.unique()
+    y = sorted(bought.mapping_shift.unique())
+    grid = np.meshgrid(x, y)
+    f_buy = SmoothBivariateSpline(bought.spread, bought.mapping_shift, bought.pv, kx=4, ky=4)
+    f_sold = SmoothBivariateSpline(sold.spread, sold.mapping_shift, sold.pv, kx=4, ky=4)
+    intp_buy = f_buy.ev(grid[0], grid[1])
+    intp_sold = f_sold.ev(grid[0], grid[1])
+    df = pd.DataFrame(intp_buy, index=grid[1][:, 0], columns=grid[0][0])
+    df1 = pd.DataFrame(intp_sold, index=grid[1][:, 0], columns=grid[0][0])
+
+    #Use LinearNDInterpolator - not complete
+ #f_buy = LinearNDInterpolator((bought.spread, bought.mapping_shift), bought.pv)
+ #f_sold = LinearNDInterpolator((sold.spread, sold.mapping_shift), sold.pv)
+
+ #Use interp2d
+ #x = bought.spread.unique()
+ #y = sorted(bought.mapping_shift.unique())
+ #f_buy = interp2d(bought.spread, bought.mapping_shift, bought.pv)
+ #f_sold = interp2d(sold.spread, sold.mapping_shift, sold.pv)
+ #intp_buy = f_buy(x,y)
+ #intp_sold = f_sold(x,y)
+ #df = pd.DataFrame(data = intp_buy, index = y, columns = x)
+ #df1 = pd.DataFrame(data = intp_sold, index = y, columns = x)
+
+ PNL = df - df1
+
+ return PNL
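+
+# Orientation of the frame returned above (matching the axis labels used in
+# plot_color_map): rows are the parallel vol-surface shift (`mapping_shift`),
+# columns are the index spread, values are long-leg PV minus short-leg PV, e.g.:
+#   date = bought.index.get_level_values(0).unique()[-3]
+#   pnl = find_mapped_pv(bought, sold, date)
+#   pnl.loc[-0.05:0.10]   # keep shifts between -5 and +10 vol points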
+
+def result_fill(df, date):
+
+    # densify a single-date (spread, vol) PV grid via interp2d, e.g. for plotting
+    data = df.xs(date).reset_index()
+    # make df.vol a variable to make this function more general
+    f = interp2d(data.spread, data.vol, data.pv)
+    x = np.arange(data.spread.min(), data.spread.max(), .5)
+    y = np.arange(data.vol.min(), data.vol.max(), .5)
+    intp_result = f(x, y)
+    df1 = pd.DataFrame(data=intp_result, index=y, columns=x)
+
+    return df1
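+
+# Sketch only: result_fill is not wired into full_analysis; it densifies one
+# leg's raw (spread, vol) PV grid onto a 0.5-unit lattice for plotting, e.g.:
+#   fine = result_fill(bought, bought.index.get_level_values(0).unique()[0])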
+
+def plot_color_map(df, val_date):
+
+    # rows are the vol-surface shift, columns are spread (see find_mapped_pv)
+    fig, ax = plt.subplots()
+
+    # Two common ways to draw a colormap: imshow and pcolormesh. Using imshow here,
+    # with the colormap recentred so that zero PV sits at the middle colour.
+    midpoint = 1 - df.max().max() / (df.max().max() + abs(df.min().min()))
+    shifted_cmap = shiftedColorMap(cm.RdYlGn, midpoint=midpoint, name='shifted')
+
+    chart = ax.imshow(df, extent=(df.columns.min(), df.columns.max(), df.index.min(), df.index.max()),
+                      aspect='auto', interpolation='bilinear', cmap=shifted_cmap)
+
+    ax.set_xlabel('Spread')
+    ax.set_ylabel('Parallel Shift of Volatility Surface')
+    ax.set_title('PV of Trade on ' + str(val_date.date()))
+
+    fig.colorbar(chart, shrink=.8)
+
+    fig.savefig("/home/serenitas/edwin/PythonGraphs/payer_swap_" + str(val_date.date()) + ".png")
+
+def build_vol_surface_functions(date=datetime.date(2017, 2, 23), index='IG', series=27):
+    df1 = pd.read_sql_query('SELECT quotedate, expiry, series, strike, vol '
+                            'FROM swaption_quotes '
+                            'WHERE index = %s and series = %s and date(quotedate) = %s',
+                            serenitasdb,
+                            index_col=['quotedate', 'expiry', 'series'],
+                            params=(index.upper(), series, date), parse_dates=['quotedate', 'expiry'])
+    index_data = pd.read_sql_query(
+        'SELECT quotedate, expiry, series, ref, fwdspread FROM swaption_ref_quotes '
+        'WHERE index= %s and date(quotedate) = %s',
+        serenitasdb, index_col=['quotedate', 'expiry', 'series'],
+        params=(index.upper(), date), parse_dates=['quotedate', 'expiry'])
+
+    df1 = df1.join(index_data)
+    # drop (quotedate, expiry, series) groups with fewer than two strikes
+    df1 = df1.groupby(df1.index).filter(lambda x: len(x) >= 2)
+    df1 = df1.reset_index()
+    # once the dates are in the columns you need to use .dt to access the date accessors
+    df1['days_to_expiry'] = (df1.expiry - df1.quotedate.dt.normalize().dt.tz_localize(None)) / np.timedelta64(1, 'D')
+    df1['moneyness'] = (df1.strike - df1.ref) / df1.ref
+    df1 = df1.groupby(['days_to_expiry', 'moneyness']).nth(-1).vol
+    df1 = df1.reset_index()
+    # linear interpolation inside the convex hull, nearest-neighbour fallback outside
+    f = LinearNDInterpolator((df1.days_to_expiry, df1.moneyness), df1.vol)
+    g = NearestNDInterpolator((df1.days_to_expiry, df1.moneyness), df1.vol)
+    return (f, g)
+
+def vol_from_surface(row, f, g):
+    # linear interpolation first; fall back to nearest-neighbour outside the hull
+    vol = f(row.days_to_expiry, row.moneyness)
+    if math.isnan(vol):
+        vol = g(row.days_to_expiry, row.moneyness)
+    return vol
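+
+# Usage sketch (the expiry/moneyness numbers are illustrative only):
+#   f, g = build_vol_surface_functions(datetime.date(2017, 2, 23), 'IG', 27)
+#   row = pd.Series({'days_to_expiry': 55.0, 'moneyness': 0.05})
+#   vol_from_surface(row, f, g)   # linear vol, nearest-neighbour if outside the hull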
+
+def full_analysis():
+    index = 'IG'
+    series = 27
+    buy_expiry = datetime.date(2017, 4, 19)
+    buy_strike = 65
+    sell_expiry = datetime.date(2017, 5, 17)
+    sell_strike = 72
+    ref = 62
+    trade_date = datetime.date(2017, 2, 23)
+
+    # use the shorter (bought) expiry for the common valuation-date grid
+    t_range = pd.bdate_range(trade_date, buy_expiry - BDay(), freq='5B')
+
+    bought = build_swaption(index, series, buy_expiry, buy_strike, ref, trade_date, t_range)
+    sold = build_swaption(index, series, sell_expiry, sell_strike, ref, trade_date, t_range)
+
+    # Calc PNL and plot:
+    traded_price = 5000
+    lowerbound = -.05  # parallel shift down 5% vol
+    upperbound = .1    # parallel shift up 10% vol
+    week = -3          # negative to count backwards from the last valuation date
+
+    PNL = calc_and_plot(bought, sold, traded_price, week, lowerbound, upperbound)
+
+    return (bought, sold, PNL)
+
+def calc_and_plot(bought, sold, traded_price, week, lowerbound, upperbound):
+
+    # clamp `week` (which may be negative to count backwards) to the available dates
+    n_dates = len(bought.index.get_level_values(0).unique())
+    if abs(week) >= n_dates:
+        week = n_dates - 1 if week >= 0 else -n_dates
+
+    date = bought.index.get_level_values(0).unique()[week]
+
+    PNL = find_mapped_pv(bought, sold, date) - traded_price
+
+    # keep only the vol shifts of interest and plot with the largest shift on top
+    PNL = PNL.loc[lowerbound:upperbound].sort_index(ascending=False)
+
+    plot_color_map(PNL, date)
+
+    return PNL
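+
+# End-to-end sketch: build both legs, net their scenario PVs, subtract the traded
+# price and write the heat map under PythonGraphs/ (DB access and paths as
+# configured above):
+#   bought, sold, PNL = full_analysis()
+#   PNL.iloc[0]   # PNL across spreads at the largest vol shift kept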
+
+
+def shiftedColorMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
+    '''
+    Offset the "center" of a colormap. Useful for data with a negative min
+    and a positive max when you want the middle of the colormap's dynamic
+    range to sit at zero.
+
+    Input
+    -----
+    cmap : The matplotlib colormap to be altered
+    start : Offset from the lowest point in the colormap's range.
+        Defaults to 0.0 (no lower offset). Should be between
+        0.0 and `midpoint`.
+    midpoint : The new center of the colormap. Defaults to
+        0.5 (no shift). Should be between 0.0 and 1.0. In
+        general, this should be 1 - vmax/(vmax + abs(vmin)).
+        For example, if your data range from -15.0 to +5.0 and
+        you want the center of the colormap at 0.0, `midpoint`
+        should be set to 1 - 5/(5 + 15), i.e. 0.75.
+    stop : Offset from the highest point in the colormap's range.
+        Defaults to 1.0 (no upper offset). Should be between
+        `midpoint` and 1.0.
+    '''
+ cdict = {
+ 'red': [],
+ 'green': [],
+ 'blue': [],
+ 'alpha': []
+ }
+
+ # regular index to compute the colors
+ reg_index = np.linspace(start, stop, 257)
+
+ # shifted index to match the data
+ shift_index = np.hstack([
+ np.linspace(0.0, midpoint, 128, endpoint=False),
+ np.linspace(midpoint, 1.0, 129, endpoint=True)
+ ])
+
+ for ri, si in zip(reg_index, shift_index):
+ r, g, b, a = cmap(ri)
+
+ cdict['red'].append((si, r, r))
+ cdict['green'].append((si, g, g))
+ cdict['blue'].append((si, b, b))
+ cdict['alpha'].append((si, a, a))
+
+    newcmap = colors.LinearSegmentedColormap(name, cdict)
+ plt.register_cmap(cmap=newcmap)
+
+ return newcmap
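+
+# Usage sketch (mirrors plot_color_map above): recentre a diverging colormap so
+# that zero maps to the middle colour, then hand it to imshow. The data here is
+# illustrative only.
+#   data = np.random.randn(20, 20) * 5 + 2
+#   mid = 1 - data.max() / (data.max() + abs(data.min()))
+#   demo_cmap = shiftedColorMap(cm.RdYlGn, midpoint=mid, name='shifted_demo')
+#   plt.imshow(data, cmap=demo_cmap); plt.colorbar(); plt.show()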
+
+
+