path: root/python/exploration/beta_trade.py
Diffstat (limited to 'python/exploration/beta_trade.py')
-rw-r--r--  python/exploration/beta_trade.py  135
1 file changed, 83 insertions(+), 52 deletions(-)
diff --git a/python/exploration/beta_trade.py b/python/exploration/beta_trade.py
index 4f10e61f..bc53c33d 100644
--- a/python/exploration/beta_trade.py
+++ b/python/exploration/beta_trade.py
@@ -2,58 +2,89 @@ import math
import os
import pandas as pd
import feather
-from index_data import index_returns
+from index_data import index_returns, get_index_quotes
from arch import arch_model
from math import log, exp, sqrt
import numpy as np
from scipy.optimize import minimize_scalar
+from scipy.optimize import minimize
-returns = index_returns(index=['IG', 'HY'], tenor='5yr')
-returns_hy = (returns.
- xs('HY', level=1).
- dropna().
- reset_index(level='series').
- groupby(level=['date']).
- nth(-1))
-returns_hy = returns_hy.set_index('series', append=True)
-returns_ig = returns.xs('IG', level=1).reset_index('tenor', drop=True)
-# hy starts trading later than ig, so we line it up based on hy series
-df = pd.merge(returns_hy, returns_ig, left_index=True, right_index=True,
- suffixes=('_hy','_ig'))
-returns = df[['price_return_hy', 'price_return_ig']]
-returns.columns = ['hy', 'ig']
-returns = returns.reset_index('series', drop=True)
-feather.write_dataframe(returns.reset_index(),
- os.path.join(os.environ["DATA_DIR"], "index_returns.fth"))
+import matplotlib.pyplot as plt
-#returns = returns.groupby('date').nth(-1)
-# three ways of computing the volatility
-# 20 days simple moving average
-vol_sma = returns.hy.rolling(20).std() * math.sqrt(252)
-vol_ewma = returns.hy.ewm(span=20).std() * math.sqrt(252)
-# GARCH(1,1)
-# we scale returns by 10 to help with the fitting
-scale = 10
-am = arch_model(scale * returns.hy.dropna())
-res = am.fit()
-vol_garch = res.conditional_volatility * math.sqrt(252)/scale
-vol = pd.concat([vol_sma, vol_ewma, vol_garch], axis=1, keys=['sma', 'ewma', 'garch'])
+def calc_returns():
+ returns = index_returns(index=['IG', 'HY'], tenor='5yr')
+ returns_hy = (returns.
+ xs('HY', level=1).
+ dropna().
+ reset_index(level='series').
+ groupby(level=['date']).
+ nth(-1))
+ returns_hy = returns_hy.set_index('series', append=True)
+ returns_ig = returns.xs('IG', level=1).reset_index('tenor', drop=True)
+ # hy starts trading later than ig, so we line it up based on hy series
+ df = pd.merge(returns_hy, returns_ig, left_index=True, right_index=True,
+ suffixes=('_hy','_ig'))
+ returns = df[['price_return_hy', 'price_return_ig']]
+ returns.columns = ['hy', 'ig']
+ #feather.write_dataframe(returns.reset_index(),
+ # os.path.join(os.environ["DATA_DIR"], "index_returns.fth"))
+ return returns.reset_index('series', drop=True)
-## let's get the betas
-beta_ewma = (returns.
- ewm(span=20).
- cov().
- groupby(level='date').
- apply(lambda df: df.values[0,1]/df.values[1,1]))
+def calc_betas():
+ returns = calc_returns()
+ beta_ewma = (returns.
+ ewm(span=20).
+ cov().
+ groupby(level='date').
+ apply(lambda df: df.values[0,1]/df.values[1,1]))
-beta_ewma5 = (returns.
- ewm(span=5).
- cov().
- groupby(level='date').
- apply(lambda df: df.values[0,1]/df.values[1,1]))
+ beta_ewma5 = (returns.
+ ewm(span=5).
+ cov().
+ groupby(level='date').
+ apply(lambda df: df.values[0,1]/df.values[1,1]))
-feather.write_dataframe(beta_ewma.to_frame('beta'),
- os.path.join(os.environ['DATA_DIR'], "beta.fth"))
+ return (beta_ewma, beta_ewma5)
+
+def plot_betas():
+ betas = calc_betas()
+ plt.plot(betas[0], label = 'EWMA20')
+ plt.plot(betas[1], label = 'EWMA5')
+ plt.xlabel('date')
+ plt.ylabel('beta')
+ plt.legend()
+
+def calc_realized_vol():
+
+ # three ways of computing the volatility
+ # 1) 20 days simple moving average
+ # 2) exponentially weighted moving average
+ # 3) GARCH(1,1), we scale returns by 10 to help with the fitting
+ returns = calc_returns()
+ vol_sma = pd.DataFrame()
+ vol_ewma = pd.DataFrame()
+ for index in returns:
+ vol_sma[index] = returns[index].rolling(20).std() * math.sqrt(252)
+ vol_ewma[index] = returns[index].ewm(span=20).std() * math.sqrt(252)
+ scale = 10
+ am = arch_model(scale * returns.hy.dropna())
+ res = am.fit()
+ vol_garch = res.conditional_volatility * math.sqrt(252)/scale
+ vol = pd.concat([vol_sma, vol_ewma, vol_garch], axis=1, keys=['sma', 'ewma', 'garch'])
+
+ ## 95th percentile of realized vol
+ vol.quantile(.95)
+
+ #feather.write_dataframe(beta_ewma.to_frame('beta'),
+ # os.path.join(os.environ['DATA_DIR'], "beta.fth"))
+
+def spreads_ratio():
+ df = get_index_quotes(series = list(range(22,29)))
+ df1 = pd.DataFrame()
+ for index in ['IG', 'HY']:
+ df1[index] = df.modelspread.xs((index, '5yr'), level=[1,4]).groupby('date').last()
+ df1['ratio'] = df1.HY/df1.IG
+ return df1
def loglik(beta, returns):
x = (returns.hy - beta*returns.ig)
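For reference, the EWMA beta computed in calc_betas above is the exponentially weighted covariance of HY and IG returns divided by the EWMA variance of IG. A minimal standalone sketch of the same quantity on synthetic data rather than the real index returns (the hy/ig column names match the DataFrame built by calc_returns; everything else here is illustrative):

import numpy as np
import pandas as pd

# synthetic daily returns standing in for the HY/IG series
rng = np.random.default_rng(0)
ig = pd.Series(rng.normal(0, 0.003, 500))
hy = pd.Series(3.5 * ig + rng.normal(0, 0.002, 500))
returns = pd.DataFrame({'hy': hy, 'ig': ig})

# EWMA beta = EWMA cov(hy, ig) / EWMA var(ig), same span=20 as in calc_betas
cov = returns.ewm(span=20).cov()
beta_ewma = cov.xs('hy', level=1)['ig'] / cov.xs('ig', level=1)['ig']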
@@ -61,13 +92,13 @@ def loglik(beta, returns):
fit = model.fit(maxlag=1)
return - fit.llf
-r = []
-for beta in np.arange(3, 5, 0.01):
- prog = minimize(loglik, np.array([0.1, 0.1, 0.1]), args=(returns, beta),
- bounds=[(None, None), (1e-6, None), (None, None)],
- method='L-BFGS-B')
- r.append(prog.fun)
+# r = []
+# for beta in np.arange(3, 5, 0.01):
+# prog = minimize(loglik, np.array([0.1, 0.1, 0.1]), args=(returns, beta),
+# bounds=[(None, None), (1e-6, None), (None, None)],
+# method='L-BFGS-B')
+# r.append(prog.fun)
-r = []
-for beta in np.arange(3, 5, 0.01):
- r.append(test(returns, beta))
+# r = []
+# for beta in np.arange(3, 5, 0.01):
+# r.append(test(returns, beta))
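The commented-out loops above were profiling the negative log-likelihood over a grid of betas. With the loglik helper from this diff, the same search can be phrased as a one-dimensional bounded minimization; this is only a sketch, and it assumes returns is the hy/ig DataFrame produced by calc_returns and that loglik runs as shown (the model it fits is defined outside the visible hunks):

from scipy.optimize import minimize_scalar

# profile out beta directly; the (3, 5) bounds mirror np.arange(3, 5, 0.01)
res = minimize_scalar(loglik, bounds=(3, 5), args=(returns,), method='bounded')
beta_hat = res.x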