Diffstat (limited to 'python/analytics/scenarios.py')
| -rw-r--r-- | python/analytics/scenarios.py | 108 |
1 file changed, 93 insertions, 15 deletions
diff --git a/python/analytics/scenarios.py b/python/analytics/scenarios.py
index d3ff5652..b389d7a3 100644
--- a/python/analytics/scenarios.py
+++ b/python/analytics/scenarios.py
@@ -1,4 +1,5 @@
 from analytics import ATMstrike
+from joblib import delayed, Parallel
 import pandas as pd
 from copy import deepcopy
 import numpy as np
@@ -8,6 +9,7 @@ from functools import partial
 from multiprocessing import Pool
 from .index_data import _get_singlenames_curves
 from .curve_trades import curve_shape
+from scipy.interpolate import RectBivariateSpline
 
 def run_swaption_scenarios(swaption, date_range, spread_shock, vol_shock,
                            vol_surface, params=["pv"], vol_time_roll=True):
@@ -109,49 +111,125 @@ def run_tranche_scenarios(tranche, spread_range, date_range, corr_map=False):
     corr_map: static correlation or mapped correlation
     """
 
-    #create empty lists
-    index_pv = np.empty_like(spread_range)
-    tranche_pv = np.empty((len(spread_range), tranche.K.size - 1))
-    tranche_delta = np.empty((len(spread_range), tranche.K.size - 1))
-
-    tranche.build_skew()
+    if np.isnan(tranche.rho[2]):
+        tranche.build_skew()
     temp_tranche = deepcopy(tranche)
     _get_singlenames_curves.cache_clear()
     orig_tranche_pvs = tranche.tranche_pvs().bond_price
     results = []
-    print(tranche.tranche_pvs().bond_price)
+    index_pv = np.empty_like(spread_range)
+    tranche_pv = np.empty((len(spread_range), tranche.K.size - 1))
+    tranche_delta = np.empty((len(spread_range), tranche.K.size - 1))
     for d in date_range:
         temp_tranche.value_date = d.date()
         for i, spread in enumerate(spread_range):
             temp_tranche.tweak(spread)
-            print(tranche.tranche_pvs().bond_price)
             if corr_map:
                 temp_tranche.rho = tranche.map_skew(temp_tranche, 'TLP')
             index_pv[i] = temp_tranche._snacpv(spread * 1e-4,
-                    temp_tranche.coupon(temp_tranche.maturity),
-                    temp_tranche.recovery)
+                                               temp_tranche.coupon(temp_tranche.maturity),
+                                               temp_tranche.recovery)
             tranche_pv[i] = temp_tranche.tranche_pvs().bond_price
             tranche_delta[i] = temp_tranche.tranche_deltas()['delta']
         carry = temp_tranche.tranche_quotes.running * \
             (d.date() - tranche.value_date).days / 360
         df = pd.concat({'pv': pd.DataFrame(tranche_pv, index=spread_range,
-                columns=tranche._row_names),
+                                           columns=tranche._row_names),
                         'delta': pd.DataFrame(tranche_delta, index=spread_range,
-                columns=tranche._row_names),
+                                              columns=tranche._row_names),
                         'carry': pd.DataFrame(
                             np.tile(carry, (len(spread_range), 1)),
                             index=spread_range, columns=tranche._row_names)},
-                axis=1)
+                       axis=1)
         df = df.join(
             pd.concat({'pnl': df['pv'].sub(orig_tranche_pvs),
-                'index_price_snac_pv': pd.Series(index_pv, index=spread_range,
+                       'index_price_snac_pv': pd.Series(index_pv, index=spread_range,
                                                         name='pv')},
-                axis=1))
+                      axis=1))
         results.append(df)
     results = pd.concat(results, keys=date_range)
     results.index.names = ['date', 'spread_range']
     return results
+def run_tranche_scenarios_rolldown(tranche, spread_range, date_range, corr_map=False):
+    """computes the pnl of a tranche for a range of spread scenarios;
+    the curve is rolled down from the back and valuations are interpolated for the dates in between
+
+    Parameters
+    ----------
+    tranche : TrancheBasket
+    spread_range : `np.array`, spread range to run (different from swaption)
+    corr_map: static correlation or mapped correlation
+    """
+
+    if np.isnan(tranche.rho[2]):
+        tranche.build_skew()
+    temp_tranche = deepcopy(tranche)
+    orig_tranche_pvs = tranche.tranche_pvs().bond_price
+
+    # containers for the coarse-grid results and the interpolators built from them
+    index_pv, tranche_pv, tranche_delta = [], [], []
+    tranche_pv_f, tranche_delta_f = [], []
+
+    # run fewer scenarios: this takes less time, since the convexity is not as strong as for swaptions
+    days = np.diff((tranche.cs.index - date_range[0]).days.values)
+    num_shortened = np.sum(tranche.cs.index < date_range[-1])
+    shorten_by = np.arange(0, max(1, num_shortened)+1, 1)
+    days = np.append(0, np.cumsum(np.flip(days,0))[:len(shorten_by)-1])
+    smaller_spread_range = np.linspace(spread_range[0], spread_range[-1], 10)
+    for i, spread in enumerate(smaller_spread_range):
+        for shortened in shorten_by:
+            if shortened > 0:
+                temp_tranche.cs = tranche.cs.iloc[:-shortened]
+            else:
+                temp_tranche.cs = tranche.cs
+            temp_tranche.tweak(spread)
+            if corr_map:
+                temp_tranche.rho = tranche.map_skew(temp_tranche, 'TLP')
+            index_pv.append(temp_tranche.index_pv().bond_price)
+            tranche_pv.append(temp_tranche.tranche_pvs().bond_price)
+            tranche_delta.append(temp_tranche.tranche_deltas()['delta'])
+    index_pv = np.reshape(index_pv, (smaller_spread_range.shape[0], days.shape[0])).transpose()
+    tranche_pv = np.array(tranche_pv).transpose()
+    tranche_delta = np.array(tranche_delta).transpose()
+    index_pv_f = RectBivariateSpline(days, smaller_spread_range, index_pv, kx=1, ky=1)
+    for pv, delta in zip(tranche_pv, tranche_delta):
+        pv = np.reshape(pv, (smaller_spread_range.shape[0], days.shape[0])).transpose()
+        delta = np.reshape(delta, (smaller_spread_range.shape[0], days.shape[0])).transpose()
+        tranche_pv_f.append(RectBivariateSpline(days, smaller_spread_range, pv, kx=1, ky=1))
+        tranche_delta_f.append(RectBivariateSpline(days, smaller_spread_range, delta, kx=1, ky=1))
+
+    # evaluate the interpolators on the full date/spread grid
+    date_range_days = (date_range - date_range[0]).days.values
+    tranche_pv = np.empty((tranche.K.size - 1, len(date_range_days), len(spread_range)))
+    tranche_delta = np.empty((tranche.K.size - 1, len(date_range_days), len(spread_range)))
+    index_pv = index_pv_f(date_range_days, spread_range)
+    for i in range(len(tranche_pv_f)):
+        tranche_pv[i] = tranche_pv_f[i](date_range_days, spread_range)
+        tranche_delta[i] = tranche_delta_f[i](date_range_days, spread_range)
+    index_pv = index_pv.reshape(1, len(date_range_days) * len(spread_range)).T
+    tranche_pv = tranche_pv.reshape(len(tranche._row_names), len(date_range_days) * len(spread_range)).T
+    tranche_delta = tranche_delta.reshape(len(tranche._row_names), len(date_range_days) * len(spread_range)).T
+    days_diff = np.tile(((date_range - date_range[0]).days/360).values, len(tranche._row_names))
+    carry = pd.DataFrame(days_diff.reshape(len(tranche._row_names), len(date_range)).T,
+                         index=date_range,
+                         columns=pd.MultiIndex.from_product([['carry'], tranche._row_names]))
+    carry.index.name = 'date'
+    df = pd.concat({'index_pv': pd.DataFrame(index_pv,
+                                             index=pd.MultiIndex.from_product([date_range, spread_range]),
+                                             columns=['index_pv']),
+                    'pv': pd.DataFrame(tranche_pv,
+                                       index=pd.MultiIndex.from_product([date_range, spread_range]),
+                                       columns=tranche._row_names),
+                    'delta': pd.DataFrame(tranche_delta,
+                                          index=pd.MultiIndex.from_product([date_range, spread_range]),
+                                          columns=tranche._row_names)},
+                   axis=1)
+    df.index.names = ['date', 'spread_range']
+    df = df.join(carry)
+    df = df.join(pd.concat({'pnl': df['pv'].sub(orig_tranche_pvs)}, axis=1))
+    return df
+
 
 
 def run_curve_scenarios(portf, spread_range, date_range, curve_per):
     """computes the pnl of a portfolio of indices for a range of spread/curve scenarios
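
The core idea of run_tranche_scenarios_rolldown in this diff is to call the expensive pricer only on a coarse (roll-days x spread) grid and then fill in the full date/spread grid with bilinear interpolation (RectBivariateSpline with kx=1, ky=1). A minimal standalone sketch of that technique, assuming a made-up slow_pv pricer and illustrative grid sizes that are not part of the repository:

# Standalone sketch, not part of the repository: approximate an expensive pricer on a
# coarse (days, spread) grid, then evaluate a bilinear interpolant on the dense grid,
# mirroring the RectBivariateSpline(..., kx=1, ky=1) usage in run_tranche_scenarios_rolldown.
import numpy as np
from scipy.interpolate import RectBivariateSpline

def slow_pv(day, spread):
    # hypothetical stand-in for the tranche pricer; smooth in both arguments
    return 100.0 - 0.01 * spread + 0.002 * day

coarse_days = np.array([0.0, 90.0, 180.0, 360.0])      # days the curve is rolled down
coarse_spreads = np.linspace(50.0, 500.0, 10)          # coarse spread scenarios
grid = np.array([[slow_pv(d, s) for s in coarse_spreads] for d in coarse_days])

# z must have shape (len(x), len(y)); kx=ky=1 gives piecewise-bilinear interpolation
pv_f = RectBivariateSpline(coarse_days, coarse_spreads, grid, kx=1, ky=1)

dense_days = np.arange(0.0, 361.0, 30.0)               # e.g. date_range converted to days
dense_spreads = np.linspace(50.0, 500.0, 46)           # e.g. the full spread_range
dense_pv = pv_f(dense_days, dense_spreads)             # shape (len(dense_days), len(dense_spreads))

The same interpolator object is reused per tranche in the diff, so the number of full repricings grows with the coarse grid size rather than with len(date_range) * len(spread_range).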
