aboutsummaryrefslogtreecommitdiffstats
path: root/python
diff options
context:
space:
mode:
Diffstat (limited to 'python')
-rw-r--r-- python/analytics/curve_trades.py | 92
1 file changed, 30 insertions(+), 62 deletions(-)
diff --git a/python/analytics/curve_trades.py b/python/analytics/curve_trades.py
index 98361cad..36ff319e 100644
--- a/python/analytics/curve_trades.py
+++ b/python/analytics/curve_trades.py
@@ -240,33 +240,20 @@ def curve_pos(value_date, index='IG'):
Returns a Portfolio of curve trades '''
- sql_string = "SELECT * FROM cds where trade_date < %s"
- df = pd.read_sql_query(sql_string, dawndb, parse_dates=['trade_date', 'maturity'],
- params=[value_date])
- df = df[df['folder'] == f'SER_{index}CURVE']
- df.notional = df.notional.where(df.protection == 'Seller', -df.notional)
- df = df.groupby(['security_id', 'maturity'])['notional'].sum()
- df = df.iloc[df.nonzero()[0]].reset_index()
+ sql_string = "SELECT index, series, tenor, notional "\
+ "FROM list_cds_positions(%s, %s) " \
+ "JOIN index_desc " \
+ "ON security_id=redindexcode AND " \
+ "index_desc.maturity=list_cds_positions.maturity"
+ df = pd.read_sql_query(sql_string, dawndb,
+ params=[value_date, f'SER_{index}CURVE'])
- sql_string = "SELECT * FROM index_maturity LEFT JOIN index_version USING (index, series)"
- lookup_table = pd.read_sql_query(sql_string, serenitasdb, parse_dates=['maturity'])
-
- df = df.merge(lookup_table, left_on=['security_id','maturity'], right_on=['redindexcode', 'maturity'])
-
- indices = []
- sql_string = "SELECT closespread FROM index_quotes where index = %s and series = %s and tenor = %s and date = %s"
- for i, row in df[['index', 'tenor', 'series', 'notional']].iterrows():
- temp = Index.from_name(row['index'], row.series, row.tenor)
- temp.value_date = value_date.date()
- if row.notional > 0:
- temp.direction = 'Seller'
- temp.notional = abs(row.notional)
- spread_df = pd.read_sql_query(sql_string, serenitasdb,
- params=[row['index'], row.series, row.tenor, value_date.date()])
- temp.spread = spread_df.iloc[0][0]
- indices.append(temp)
-
- return Portfolio(indices)
+ portf = Portfolio([Index.from_name(row.index, row.series, row.tenor,
+ value_date, -row.notional)
+ for row in df[['index', 'tenor', 'series', 'notional']].
+ itertuples(index=False)])
+ portf.mark()
+ return portf
def curve_shape(value_date, index='IG', percentile=.95, spread=None):
@@ -318,43 +305,24 @@ def pos_pnl_abs(portf, value_date, index='IG', rolling=6, years=3):
df = pd.DataFrame.from_records(chain(*r), columns=['date', 'five_yr_spread', 'pnl'])
return df.set_index('date')
-def curve_scen_table(portf):
-
+def curve_scen_table(portf, shock=10):
'''
- Runs PNL scenario on portf by shocking different points on the curve - off-the-runs shocks are duration linearly interpolated'''
-
- portf_copy = deepcopy(portf)
- portf_copy.reset_pv()
-
- index = portf_copy.indices[0].index_type
- series = on_the_run(index)
- sql_string = "SELECT tenor, maturity FROM index_maturity where index = %s and series = %s"
- lookup_table = pd.read_sql_query(sql_string, serenitasdb, parse_dates=['maturity'], params=[index, series])
- lookup_table['year_frac'] = (lookup_table.maturity - pd.to_datetime(portf_copy.value_date)).dt.days/365
-
- lookup_table = lookup_table.iloc[[0,1,3]]
- year_frac = lookup_table.year_frac.tolist()
- tenors = lookup_table.tenor.tolist()
- shock = 10 #shocks in bps
+ Runs PNL scenario on portf by shocking different points on the curve.
+ off-the-runs shocks are linearly interpolated'''
+ otr_year_frac = np.array([(e - portf.value_date).days / 365 \
+ for e in roll_date(portf.value_date, [3, 5, 10])])
+ portf_year_frac = [(ind.end_date - ind.value_date).days / 365 for ind in portf.indices]
r = []
-
- for t_frac, t_ten in zip(year_frac, tenors):
- tenor_shock = pd.DataFrame(0, index=lookup_table['year_frac'], columns=['shock'])
- tenor_shock.loc[t_frac] = -shock
- for w_frac, w_ten in zip(year_frac, tenors):
- tenor_shock.loc[w_frac] = 0 if t_ten == w_ten else shock
+ for i, tenor1 in enumerate(['3yr', '5yr', '10yr']):
+ for j, tenor2 in enumerate(['3yr', '5yr', '10yr']):
+ shocks = np.full(4, 0)
+ shocks[i+1] += shock
+ shocks[j+1] -= shock
#f is the shock amount interpolated based on tenor
- f = interp1d(np.hstack([0, year_frac]), np.hstack([tenor_shock.shock.iloc[0], tenor_shock.shock]))
- for i, ind in enumerate(portf_copy.indices):
- ind.spread = max(0, portf.indices[i].spread + f((ind.end_date - portf_copy.value_date).days/365))
- r.append([t_ten, w_ten] + [portf_copy.pv - portf.pv])
- tenor_shock.loc[w_frac] = -shock if t_ten == w_ten else 0
- tenor_shock.loc[t_frac] = 0
+ f = interp1d(np.hstack([0, otr_year_frac]), shocks)
+ portf_copy = deepcopy(portf)
+ portf_copy.reset_pv()
+ for ind, yf in zip(portf_copy.indices, portf_year_frac):
+ ind.spread += float(f(yf))
+ r.append((tenor1, tenor2, portf_copy.pnl))
return pd.DataFrame.from_records(r, columns=['tighter', 'wider', 'pnl'])
-
-
-
-
-
-
-