-rw-r--r--  python/analytics/index_data.py        4
-rw-r--r--  python/analytics/portfolio.py          2
-rw-r--r--  python/exploration/curve_trades.py    76
-rw-r--r--  python/notebooks/Curve Trades.ipynb   32
4 files changed, 87 insertions, 27 deletions
diff --git a/python/analytics/index_data.py b/python/analytics/index_data.py
index 784b16d4..687481cd 100644
--- a/python/analytics/index_data.py
+++ b/python/analytics/index_data.py
@@ -119,8 +119,8 @@ def index_returns(df=None, index=None, series=None, tenor=None, from_date=None,
df = df.reset_index('date').join(coupon_data).reset_index('tenor')
df.tenor = df.tenor.astype(tenor_t)
df = df.set_index(['tenor'], append=True)
- df['day_frac'] = (df.groupby(level=['index', 'series', 'tenor'])['date'].
- diff().
+ df['day_frac'] = (df.groupby(level=['index', 'series', 'tenor']).diff().
+ date.
astype('timedelta64[D]') / 360)
df['price_return'] += df.day_frac * df.coupon
df = df.drop(['day_frac', 'coupon', 'maturity'], axis=1)
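
The change above folds coupon carry into price_return: for each (index, series, tenor) curve point it takes the ACT/360 day fraction between consecutive quote dates and multiplies it by the running coupon. A minimal standalone sketch of that accrual term, on an invented three-row frame (the column names follow the code above; the dates and values are illustrative only):

import pandas as pd

# Invented miniature of the quotes frame: MultiIndex (index, series, tenor),
# a quote 'date', the running 'coupon' and the clean 'price_return'.
df = pd.DataFrame(
    {'date': pd.to_datetime(['2018-05-01', '2018-05-03', '2018-05-04']),
     'coupon': [0.01, 0.01, 0.01],
     'price_return': [0.000, 0.002, -0.001]},
    index=pd.MultiIndex.from_tuples([('IG', 30, '5yr')] * 3,
                                    names=['index', 'series', 'tenor']))

# ACT/360 fraction of a year between consecutive quote dates of each curve point
day_frac = (df.groupby(level=['index', 'series', 'tenor'])['date']
              .diff().dt.days / 360)

# Fold the coupon accrued over each gap into the price return (carry);
# the first row of each group has no prior date, so it stays NaN, as in the hunk above.
df['price_return'] += day_frac * df['coupon']
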
diff --git a/python/analytics/portfolio.py b/python/analytics/portfolio.py
index fd73e0e5..0f57c674 100644
--- a/python/analytics/portfolio.py
+++ b/python/analytics/portfolio.py
@@ -12,7 +12,7 @@ def portf_repr(method):
obj = args[0]
thousands = lambda x: "{:,.2f}".format(x)
percent = lambda x: "N/A" if np.isnan(x) else f"{100*x:.2f}%"
- header = "Portfolio {}\n\n".format(obj.trade_date)
+ header = "Portfolio {}\n\n".format(obj.value_date)
kwargs = {'formatters': {'Notional': thousands,
'PV': thousands,
'Delta': percent,
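
For context, thousands and percent above are the kind of per-column formatters pandas accepts in DataFrame.to_string / to_html; how portf_repr actually renders the table is outside this hunk, so the following is only a sketch of the formatting behaviour on an invented positions frame:

import numpy as np
import pandas as pd

thousands = lambda x: "{:,.2f}".format(x)
percent = lambda x: "N/A" if np.isnan(x) else f"{100*x:.2f}%"

# Invented positions table; the real one comes from the Portfolio object.
positions = pd.DataFrame({'Notional': [10_000_000, -5_000_000],
                          'PV': [123_456.78, -23_456.78],
                          'Delta': [0.0512, np.nan]})

header = "Portfolio {}\n\n".format(pd.Timestamp('2018-05-15').date())  # obj.value_date
print(header + positions.to_string(formatters={'Notional': thousands,
                                               'PV': thousands,
                                               'Delta': percent}))
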
diff --git a/python/exploration/curve_trades.py b/python/exploration/curve_trades.py
index b6d35d3d..3c0a25ad 100644
--- a/python/exploration/curve_trades.py
+++ b/python/exploration/curve_trades.py
@@ -11,25 +11,29 @@ import numpy as np
import matplotlib.pyplot as plt
from statsmodels.sandbox.regression.predstd import wls_prediction_std
+from scipy.interpolate import interp1d
-_engine = dbengine('serenitasdb')
+serenitasdb = dbengine('serenitasdb')
+dawndb = dbengine('dawndb')
def on_the_run(index):
- r = _engine.execute("SELECT max(series) FROM index_version WHERE index=%s",
+ r = serenitasdb.execute("SELECT max(series) FROM index_version WHERE index=%s",
(index,))
series, = r.fetchone()
return series
-def curve_spread_diff(index='IG', rolling=6):
+def curve_spread_diff(index='IG', rolling=6, years=3, percentage=False, percentage_base='5yr'):
otr = on_the_run(index)
## look at spreads
df = get_index_quotes(index, list(range(otr - rolling, otr + 1)),
- tenor=['3yr', '5yr', '7yr', '10yr'])
+ tenor=['3yr', '5yr', '7yr', '10yr'], years=years)
spreads = df.groupby(level=['date', 'tenor']).nth(-1)['closespread'].unstack(-1)
spreads_diff = spreads.diff(axis=1)
del spreads_diff['3yr']
spreads_diff.columns = ['3-5', '5-7', '7-10']
spreads_diff['5-10'] = spreads_diff['5-7'] + spreads_diff['7-10']
+    if percentage:
+        spreads_diff = spreads.apply(lambda row: row / row[percentage_base], axis=1)
return spreads_diff
def spreads_diff_table(spreads_diff):
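
With the flag added above, curve_spread_diff returns either the absolute spread differences between adjacent tenors ('3-5', '5-7', '7-10', '5-10') or, when percentage=True, the whole curve on each date expressed as a multiple of the percentage_base tenor; the latter is what curve_shape further down relies on. A usage sketch (the shape numbers in the comment are illustrative, not real quotes):

# Absolute steepness between adjacent tenors, in spread points
diffs = curve_spread_diff('IG', rolling=6, years=3)

# Curve shape: each tenor's spread divided by the 5yr spread on the same date,
# e.g. a row roughly like 3yr=0.45, 5yr=1.00, 7yr=1.35, 10yr=1.70
shape = curve_spread_diff('IG', rolling=6, years=3,
                          percentage=True, percentage_base='5yr')
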
@@ -133,7 +137,6 @@ def cross_series_curve(index='IG', rolling=6):
agg(lambda df: (1 + df).prod() - 1))
plt.plot(monthly_returns_cross_series)
-
def forward_loss(index='IG'):
start_date = (pd.Timestamp.now() - pd.DateOffset(years=3)).date()
@@ -238,3 +241,66 @@ def spot_forward(index='IG', series=None, tenors=['3yr', '5yr', '7yr', '10yr']):
df = df_0.append(df)
df['maturity'] = [b_index.trade_date, maturity_1yr] + b_index.maturities
return df.reset_index().set_index('maturity')
+
+def curve_pos(trade_date, index='IG'):
+    '''Build a Portfolio of the curve trades booked before trade_date.
+
+    index selects the folder in the dawndb cds table: 'IG' (SER_IGCURVE),
+    'HY' (SER_HYCURVE); anything else falls through to SER_ITRXCURVE.'''
+
+ sql_string = "SELECT * FROM cds where trade_date < %s"
+ df = pd.read_sql_query(sql_string, dawndb, parse_dates=['trade_date', 'maturity'],
+ params=[trade_date])
+    if index == 'IG':
+        df = df[df['folder'] == 'SER_IGCURVE']
+    elif index == 'HY':
+        df = df[df['folder'] == 'SER_HYCURVE']
+    else:
+        df = df[df['folder'] == 'SER_ITRXCURVE']
+    df.notional = df.apply(lambda x: -x.notional if x.protection == 'Buyer' else x.notional, axis=1)
+    df = df.groupby(['security_id', 'maturity'])['notional'].sum()
+    df = df[df != 0].reset_index()
+
+ sql_string = "SELECT * FROM index_maturity LEFT JOIN index_version USING (index, series)"
+ lookup_table = pd.read_sql_query(sql_string, serenitasdb, parse_dates=['maturity'])
+
+ df = df.merge(lookup_table, left_on=['security_id','maturity'], right_on=['redindexcode', 'maturity'])
+
+ indices = []
+ sql_string = "SELECT closespread FROM index_quotes where index = %s and series = %s and tenor = %s and date = %s"
+ for i, row in df[['index', 'tenor', 'series', 'notional']].iterrows():
+ temp = Index.from_name(row['index'], row.series, row.tenor)
+ temp.value_date = trade_date.date()
+ if row.notional > 0:
+ temp.direction = 'Seller'
+ temp.notional = abs(row.notional)
+ spread_df = pd.read_sql_query(sql_string, serenitasdb,
+ params=[row['index'], row.series, row.tenor, trade_date.date()])
+ temp.spread = spread_df.iloc[0][0]
+ indices.append(temp)
+
+ return Portfolio(indices)
+
+def curve_shape(trade_date, index='IG', percentile=.95):
+    '''Return a function that linearly interpolates spread against maturity in years,
+    using the historical curve shape at the given steepness percentile rescaled to
+    the on-the-run 5yr closespread as of trade_date.'''
+
+    shape = curve_spread_diff(index, rolling=10, years=5, percentage=True)
+    steepness = shape['10yr'] / shape['3yr']
+ series = on_the_run(index)
+
+ sql_string = "SELECT closespread FROM index_quotes where index = %s and series = %s and tenor = %s and date = %s"
+ spread_df = pd.read_sql_query(sql_string, serenitasdb,
+ params=[index, series, '5yr', trade_date.date()])
+ sql_string = "SELECT tenor, maturity FROM index_maturity where index = %s and series = %s"
+ lookup_table = pd.read_sql_query(sql_string, serenitasdb, parse_dates=['maturity'], params=[index, series])
+
+    df = shape[steepness == steepness.quantile(percentile, interpolation='nearest')]
+    df = df * spread_df.iloc[0][0] / df['5yr'][0]
+ df = df.stack().rename('spread')
+ df = df.reset_index().merge(lookup_table, on=['tenor'])
+ df['year_frac'] = (df.maturity - pd.to_datetime(trade_date)).dt.days/365
+ return interp1d(np.hstack([0, df.year_frac]), np.hstack([0, df.spread]))
+
+
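
Taken together, curve_pos and curve_shape drive the scenario run in the notebook below: curve_pos rebuilds the live curve book as a Portfolio of Index objects marked as of trade_date, and curve_shape returns a spread-vs-maturity interpolator for a chosen steepness percentile. Note that Index and Portfolio are used here but not added to the imports in this hunk, so they are presumably already imported at the top of curve_trades.py (outside the diff context); likewise, legs with negative net notional are assumed to default to protection 'Buyer' on the Index object, since only the 'Seller' direction is set explicitly. A usage sketch mirroring the notebook (the attribute names pv, indices and end_date follow the notebook's usage):

import pandas as pd
import curve_trades as ct

report_date = (pd.Timestamp.today() - pd.offsets.BDay(1)).normalize()

# Live curve book, marked at the previous business day's close
book = ct.curve_pos(report_date, index='IG')
base_pv = book.pv

# Re-mark every leg on the 5th-percentile (flattest) historical curve shape
flat_curve = ct.curve_shape(report_date, index='IG', percentile=.05)
for ind in book.indices:
    years_to_maturity = (pd.to_datetime(ind.end_date) - report_date).days / 365
    ind.spread = flat_curve(years_to_maturity)

# P&L of the book if the curve flattens to that shape
flattening_pnl = book.pv - base_pv
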
diff --git a/python/notebooks/Curve Trades.ipynb b/python/notebooks/Curve Trades.ipynb
index f0027e40..5637beaf 100644
--- a/python/notebooks/Curve Trades.ipynb
+++ b/python/notebooks/Curve Trades.ipynb
@@ -9,7 +9,8 @@
"import curve_trades as ct\n",
"import matplotlib.pyplot as plt\n",
"import pandas as pd\n",
- "from ipywidgets import widgets"
+ "from ipywidgets import widgets\n",
+ "import pandas as pd"
]
},
{
@@ -135,7 +136,7 @@
"metadata": {},
"outputs": [],
"source": [
- "model = ct.curve_model()\n",
+ "model = ct.curve_model('5yr', '10yr')\n",
"model_results = ct.curve_model_results(model[0], model[1])"
]
},
@@ -145,7 +146,10 @@
"metadata": {},
"outputs": [],
"source": [
- "model[1].summary()"
+    "# Scenario analysis on current position\n",
+ "report_date = (pd.datetime.today() - pd.offsets.BDay(1)).normalize()\n",
+ "curve_pos = ct.curve_pos(report_date)\n",
+ "origpv = curve_pos.pv"
]
},
{
@@ -154,8 +158,9 @@
"metadata": {},
"outputs": [],
"source": [
- "#Var on current position\n",
- "results = ct.curve_var()"
+    "flat_curve = ct.curve_shape(report_date, percentile=.05)\n",
+    "for ind in curve_pos.indices:\n",
+    "    ind.spread = flat_curve((pd.to_datetime(ind.end_date) - report_date).days/365)"
]
},
{
@@ -164,19 +169,8 @@
"metadata": {},
"outputs": [],
"source": [
- "model_results"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "results\n",
- "#first number: at 5% flatness\n",
- "#second number: at predicted curve shape\n",
- "#third number: at 5% flatness given duration and spread"
+    "# P&L if the curve flattens to the 5th-percentile shape\n",
+ "curve_pos.pv - origpv"
]
},
{
@@ -226,7 +220,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.6.4"
+ "version": "3.6.5"
}
},
"nbformat": 4,