import math
import os
import pandas as pd
import feather
import numpy as np
import matplotlib.pyplot as plt
from math import log, exp, sqrt
from scipy.optimize import minimize_scalar, minimize
from statsmodels.tsa.ar_model import AR  # needed by loglik below
from index_data import index_returns, get_index_quotes
from arch import arch_model
def calc_returns():
    """Daily 5yr price returns, columns ['hy', 'ig']; HY uses the latest series quoted on each date."""
    returns = index_returns(index=['IG', 'HY'], tenor='5yr')
    returns_hy = (returns.
                  xs('HY', level=1).
                  dropna().
                  reset_index(level='series').
                  groupby(level=['date']).
                  nth(-1))
    returns_hy = returns_hy.set_index('series', append=True)
    returns_ig = returns.xs('IG', level=1).reset_index('tenor', drop=True)
    # HY starts trading later than IG, so line the two up on the HY series
    df = pd.merge(returns_hy, returns_ig, left_index=True, right_index=True,
                  suffixes=('_hy', '_ig'))
    returns = df[['price_return_hy', 'price_return_ig']]
    returns.columns = ['hy', 'ig']
    # feather.write_dataframe(returns.reset_index(),
    #                         os.path.join(os.environ["DATA_DIR"], "index_returns.fth"))
    return returns.reset_index('series', drop=True)
def calc_betas():
    """EWMA betas of HY to IG returns, cov(hy, ig) / var(ig), with spans 20 and 5."""
    returns = calc_returns()
    beta_ewma = (returns.
                 ewm(span=20).
                 cov().
                 groupby(level='date').
                 apply(lambda df: df.values[0, 1] / df.values[1, 1]))
    beta_ewma5 = (returns.
                  ewm(span=5).
                  cov().
                  groupby(level='date').
                  apply(lambda df: df.values[0, 1] / df.values[1, 1]))
    return (beta_ewma, beta_ewma5)
def plot_betas():
    betas = calc_betas()
    plt.plot(betas[0], label='EWMA20')
    plt.plot(betas[1], label='EWMA5')
    plt.xlabel('date')
    plt.ylabel('beta')
    plt.legend()
def calc_realized_vol():
    """Annualized realized volatility of the index returns, three ways:
    1) 20-day simple moving average
    2) exponentially weighted moving average (span=20)
    3) GARCH(1,1); returns are scaled by 10 to help the fit converge
    """
    returns = calc_returns()
    vol_sma = pd.DataFrame()
    vol_ewma = pd.DataFrame()
    for index in returns:
        vol_sma[index] = returns[index].rolling(20).std() * math.sqrt(252)
        vol_ewma[index] = returns[index].ewm(span=20).std() * math.sqrt(252)
    scale = 10
    am = arch_model(scale * returns.hy.dropna())
    res = am.fit()
    vol_garch = res.conditional_volatility * math.sqrt(252) / scale
    vol = pd.concat([vol_sma, vol_ewma, vol_garch], axis=1,
                    keys=['sma', 'ewma', 'garch'])
    ## tail check: 95th percentile of the vol estimates
    vol.quantile(.95)
    # feather.write_dataframe(beta_ewma.to_frame('beta'),
    #                         os.path.join(os.environ['DATA_DIR'], "beta.fth"))
    return vol
def spreads_ratio():
    """Daily HY/IG ratio of 5yr model spreads across series 22-28."""
    df = get_index_quotes(series=list(range(22, 29)))
    df1 = pd.DataFrame()
    for index in ['IG', 'HY']:
        df1[index] = df.modelspread.xs((index, '5yr'), level=[1, 4]).groupby('date').last()
    df1['ratio'] = df1.HY / df1.IG
    return df1
def loglik(beta, returns):
    # negative log likelihood of an AR(1) fit to the hedged return hy - beta * ig
    x = returns.hy - beta * returns.ig
    model = AR(x, missing='drop')
    fit = model.fit(maxlag=1)
    return -fit.llf
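# A sketch (not in the original): minimize the AR(1) negative log likelihood over
# beta directly with minimize_scalar, which is imported above but otherwise unused.
# The bounds (1, 10) are an assumption, chosen to bracket the 3-5 grid used in the
# commented-out searches below.
def estimate_beta_mle(returns):
    res = minimize_scalar(loglik, bounds=(1, 10), args=(returns,), method='bounded')
    return res.x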
# r = []
# for beta in np.arange(3, 5, 0.01):
# prog = minimize(loglik, np.array([0.1, 0.1, 0.1]), args=(returns, beta),
# bounds=[(None, None), (1e-6, None), (None, None)],
# method='L-BFGS-B')
# r.append(prog.fun)
# r = []
# for beta in np.arange(3, 5, 0.01):
# r.append(test(returns, beta))
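# Hedged usage sketch (not part of the original script): how the pieces above might
# be run together; assumes the index_data module and its data source are available.
if __name__ == '__main__':
    beta20, beta5 = calc_betas()
    plot_betas()
    plt.show()
    print(spreads_ratio().tail())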