Diffstat (limited to 'python/exploration/option_trades.py')
-rw-r--r--  python/exploration/option_trades.py  102
1 file changed, 63 insertions(+), 39 deletions(-)
diff --git a/python/exploration/option_trades.py b/python/exploration/option_trades.py
index e3217e07..aadbddfd 100644
--- a/python/exploration/option_trades.py
+++ b/python/exploration/option_trades.py
@@ -26,7 +26,7 @@ def realized_vol(index, series, tenor='5yr', date=None, years=None, return_type=
     return (res.conditional_volatility * math.sqrt(252), res)

 def lr_var(res):
-    """ computes long run variance of the garch process
+    r""" computes long run variance of the garch process

     .. math::
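
The docstring change above only makes the embedded `.. math::` block a raw string, so its LaTeX backslashes are not treated as Python escape sequences. For reference, the quantity lr_var computes is the annualized long-run (unconditional) volatility of the fitted GARCH process. A minimal standalone sketch, assuming an arch-style GARCH(1,1) parameter Series with the names 'omega', 'alpha[1]' and 'beta[1]' (those names and the helper below are assumptions, not taken from this diff):

    import math
    import pandas as pd

    def long_run_vol(params: pd.Series) -> float:
        """Annualized long-run vol of a GARCH(1,1) fit (illustrative sketch only)."""
        omega = params['omega']
        persistence = params[['alpha[1]', 'beta[1]']].sum()  # ARCH + GARCH coefficients
        daily_variance = omega / (1 - persistence)            # unconditional daily variance
        return math.sqrt(daily_variance * 252)                # annualize over 252 trading days

    # Made-up parameters for illustration:
    print(long_run_vol(pd.Series({'omega': 0.02, 'alpha[1]': 0.08, 'beta[1]': 0.90})))

The long-run variance only exists when the persistence (alpha + beta) is below one.
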
@@ -38,21 +38,38 @@ def lr_var(res):
     var = res.params[names[0]]/(1 - res.params[names[1:]])
     return math.sqrt(var * 252)

-def atm_vol_fun(v, moneyness=0.2, index=None):
-    f = interp1d(v.strike.values, v.vol.values, fill_value='extrapolate')
-    if index is None:
-        atm_val = v['fwdspread'].iat[0]
-        otm_val = atm_val * (1 + moneyness)
-    else:
-        index.ref = v['ref'].iat[0]
-        atm_val = ForwardIndex(index, v.index.get_level_values('expiry')[0]).forward_spread
-        otm_val = atm_val * (1 + moneyness)
-        if index._quote_is_price:
-            index.spread = atm_val
-            atm_val = index.price
-            index.spread = otm_val
-            otm_val = index.price
-    return pd.Series(f(np.array([atm_val, otm_val])), index = ['atm_vol', 'otm_vol'])
+def atm_vol_calc(df, index_type, moneyness):
+    r = np.empty((len(df.index.unique()), 3))
+    i = 0
+    index_keys = []
+    for s, g1 in df.groupby(level='series'):
+        index = Index.from_name(index_type, s, '5yr')
+        for date, g2 in g1.groupby(pd.Grouper(level='quotedate', freq='D')):
+            if not g2.empty:
+                index.trade_date = date.date()
+                for (ref, expiry), g3 in g2.reset_index('expiry').groupby(['ref', 'expiry']):
+                    index.ref = ref
+                    atm_val = forward_spread = ForwardIndex(index, expiry, False).forward_spread
+                    otm_val = atm_val * (1 + moneyness)
+                    if index._quote_is_price:
+                        index.spread = atm_val
+                        atm_val = index.price
+                        index.spread = otm_val
+                        otm_val = index.price
+                    for quotedate, v in g3.groupby(level='quotedate'):
+                        f = interp1d(v.strike.values, v.vol.values, fill_value='extrapolate')
+                        r[i, 0] = forward_spread
+                        r[i, 1:] = f([atm_val, otm_val])
+                        i += 1
+                        index_keys.append((quotedate, expiry, s))
+    df = pd.DataFrame(data=r,
+                      index=pd.MultiIndex.from_tuples(index_keys,
+                                                      names=['quotedate', 'expiry', 'series']),
+                      columns=['forward_spread', 'atm_vol', 'otm_vol'])
+    df['T'] = df.index.get_level_values('expiry').values.astype('datetime64[D]') - \
+        df.index.get_level_values('quotedate').values.astype('datetime64[D]')
+    df['T'] = df['T'].dt.days / 365
+    return df

 def atm_vol(index, date, series=None, moneyness=0.2):
     sql_str = "SELECT * from swaption_ref_quotes JOIN swaption_quotes " \
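
The heart of the new atm_vol_calc is the same read-off that atm_vol_fun performed: interpolate the quoted vol smile in strike and evaluate it at the forward spread (ATM) and at a strike (1 + moneyness) further out, converting both spread levels to prices first when the index is price-quoted. A self-contained sketch of just that interpolation step, with made-up strikes, vols and forward level standing in for one (quotedate, expiry) quote group:

    import numpy as np
    from scipy.interpolate import interp1d

    # Hypothetical vol slice for one expiry: strikes (spread in bp) and quoted vols.
    strikes = np.array([250.0, 300.0, 350.0, 400.0, 450.0])
    vols = np.array([0.52, 0.48, 0.46, 0.47, 0.50])

    f = interp1d(strikes, vols, fill_value='extrapolate')

    forward_spread = 320.0                      # stand-in for ForwardIndex(...).forward_spread
    moneyness = 0.2
    atm_val = forward_spread                    # at-the-money strike
    otm_val = forward_spread * (1 + moneyness)  # 20% out-of-the-money strike

    atm_vol, otm_vol = f([atm_val, otm_val])
    print(atm_vol, otm_vol)

Preallocating r as an (n, 3) array and building the MultiIndex once at the end avoids the per-group pd.concat of the old implementation.
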
@@ -65,21 +82,7 @@ def atm_vol(index, date, series=None, moneyness=0.2):
     df = pd.read_sql_query(sql_str, serenitasdb,
                            index_col=['quotedate', 'expiry', 'series'],
                            params=params, parse_dates=['quotedate'])
-    df1 = atm_vol_calc(df, index)
-    return df1
-
-def atm_vol_calc(df, index):
-    g_temp = {}
-    for s, g1 in df.groupby(level='series'):
-        index_obj = Index.from_name(index, s, '5yr')
-        for date, g2 in g1.groupby(g1.index.get_level_values(0)):
-            index_obj.trade_date = date.date()
-            for expiry, g3 in g2.groupby(g2.index.get_level_values(1)):
-                g_temp[(date, expiry, s)] = atm_vol_fun(g3, index=index_obj)
-    df = pd.concat(g_temp, names=['quotedate', 'expiry', 'series'])
-    df = df.unstack(-1)
-    df = df.reset_index(level=['expiry', 'series'])
-    return df
+    return atm_vol_calc(df, index, moneyness)

 def rolling_vol(df, col='atm_vol', term=[3]):
     """compute the rolling volatility for various terms"""
@@ -224,11 +227,32 @@ def compute_allocation(df):
     return (W, fund_return, fund_vol)

 if __name__ == "__main__":
-    d1 = sell_vol_strategy(months=1)
-    d2 = sell_vol_strategy(months=2)
-    d3 = sell_vol_strategy(months=3)
-    all_tenors = pd.concat([aggregate_trades(d) for d in [d1, d2, d3]], axis=1)
-    all_tenors.columns = ['1m', '2m', '3m']
-    all_tenors['optimal'] = ((1.2*all_tenors['1m']).
-                             sub(1.2*all_tenors['2m'], fill_value=0).
-                             add(all_tenors['3m'], fill_value=0))
+    # d1 = sell_vol_strategy(months=1)
+    # d2 = sell_vol_strategy(months=2)
+    # d3 = sell_vol_strategy(months=3)
+    # all_tenors = pd.concat([aggregate_trades(d) for d in [d1, d2, d3]], axis=1)
+    # all_tenors.columns = ['1m', '2m', '3m']
+    # all_tenors['optimal'] = ((1.2*all_tenors['1m']).
+    #                          sub(1.2*all_tenors['2m'], fill_value=0).
+    #                          add(all_tenors['3m'], fill_value=0))
+    import datetime
+    import statsmodels.formula.api as smf
+    ## HY
+    df = atm_vol("HY", datetime.date(2017, 3, 20))
+    df['forward_spread'] *= 1e-4
+    df['log_forward_spread'] = np.log(df['forward_spread'])
+    df['log_atm_vol'] = np.log(df['atm_vol'])
+    df_hy28 = df.xs(28, level='series')
+    results = smf.ols('log_atm_vol ~ log_forward_spread + T', data=df_hy28).fit()
+    beta_hy28 = 1 + results.params.log_forward_spread
+    print(results.summary())
+
+    ## IG
+    df = atm_vol("IG", datetime.date(2017, 3, 20))
+    df['forward_spread'] *= 1e-4
+    df['log_forward_spread'] = np.log(df['forward_spread'])
+    df['log_atm_vol'] = np.log(df['atm_vol'])
+    df_ig28 = df.xs(28, level='series')
+    results = smf.ols('log_atm_vol ~ log_forward_spread + T', data=df_ig28).fit()
+    beta_ig28 = 1 + results.params.log_forward_spread
+    print(results.summary())
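
The new __main__ block regresses log ATM vol on log forward spread and time to expiry for a single series (HY28 and IG28). Reading beta as 1 plus the slope is consistent with a CEV-style backbone, i.e. an absolute spread vol proportional to S**beta, since a lognormal vol scaling like S**(beta - 1) is equivalent to that; this interpretation is inferred, not stated in the diff. A synthetic check of the recovery, using the same statsmodels formula API but simulated data rather than serenitasdb quotes:

    import numpy as np
    import pandas as pd
    import statsmodels.formula.api as smf

    rng = np.random.default_rng(0)
    n = 500
    true_beta = 0.6  # backbone exponent to recover

    # Simulated decimal forward spreads (as after the *= 1e-4 rescaling) and year fractions.
    forward_spread = rng.uniform(0.003, 0.06, n)
    T = rng.uniform(0.05, 0.75, n)
    # Lognormal ATM vol scaling like spread**(beta - 1), with a mild term effect and noise.
    atm_vol = 2.0 * forward_spread ** (true_beta - 1) * np.exp(-0.3 * T + 0.05 * rng.standard_normal(n))

    sim = pd.DataFrame({'log_atm_vol': np.log(atm_vol),
                        'log_forward_spread': np.log(forward_spread),
                        'T': T})
    results = smf.ols('log_atm_vol ~ log_forward_spread + T', data=sim).fit()
    print(1 + results.params.log_forward_spread)  # should land near true_beta = 0.6
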