import sys
sys.path.append("..")

from graphics import plot_time_color_map
import analytics.tranche_functions as tch
import analytics.tranche_basket as bkt
import analytics.basket_index as idx_bkt
import numpy as np
import pandas as pd
from analytics import Swaption, BlackSwaption, Index, BlackSwaptionVolSurface, Portfolio
from analytics.scenarios import run_swaption_scenarios, run_index_scenarios, run_portfolio_scenarios
import exploration.swaption_calendar_spread as spread
from scipy.interpolate import interp1d
from datetime import date
from db import dbengine

engine = dbengine('serenitasdb')


def rv_calc1():
    # Let's do IG27 from IG29; get the quotes from risk_numbers_new, not just random ones.
    # Get the IG29 1-year-shortened rho with TLP and compare it to the IG27 5y rho.
    index = 'IG'
    series = 29
    series2 = series - 2
    tenor = '5yr'
    shortened = 4
    method = 'TLP'
    # Read existing results and find which dates still need to run.
    try:
        results = pd.read_csv(f"/home/serenitas/edwin/Python/rv_{index}{series}.csv",
                              parse_dates=['date'], index_col=['date'])
    except IOError:
        results = pd.DataFrame()
    sql_string = ("select distinct date from risk_numbers_new "
                  "where index = %s and series = %s order by date desc")
    df = pd.read_sql_query(sql_string, engine, params=(index, series), parse_dates=['date'])
    df1 = pd.read_sql_query(sql_string, engine, params=(index, series2), parse_dates=['date'])
    df = df.merge(df1, on=['date'])
    df = df[~df.date.isin(results.index)]

    rho_tlp, pv_tlp, rho_prev_index, pv_prev_index = [], [], [], []
    tranche = bkt.TrancheBasket(index, series, tenor)
    tranche2 = bkt.TrancheBasket(index, series2, tenor)
    for trade_date in df.date:
        tranche.trade_date = trade_date
        tranche2.trade_date = trade_date
        tranche.build_skew()
        tranche.rho = tranche.map_skew(tranche, method, shortened)
        pv = tranche.tranche_pvs().bond_price
        rho_tlp.append(tranche.rho[1:-1])
        pv_tlp.append(pv)
        tranche2.build_skew()
        rho_prev_index.append(tranche2.rho[1:-1])
        tranche.rho = tranche2.rho
        pv = tranche.tranche_pvs(shortened=shortened).bond_price
        pv_prev_index.append(pv)
    temp1 = pd.DataFrame(rho_tlp, index=df.date,
                         columns=['3_rho_tlp', '7_rho_tlp', '15_rho_tlp'])
    temp2 = pd.DataFrame(pv_tlp, index=df.date,
                         columns=['03_pv_tlp', '37_pv_tlp', '715_pv_tlp', '15100_pv_tlp'])
    temp3 = pd.DataFrame(rho_prev_index, index=df.date,
                         columns=['3_rho_ig27', '7_rho_ig27', '15_rho_ig27'])
    temp4 = pd.DataFrame(pv_prev_index, index=df.date,
                         columns=['03_pv_ig27', '37_pv_ig27', '715_pv_ig27', '15100_pv_ig27'])
    results = pd.concat([results, pd.concat([temp1, temp2, temp3, temp4], axis=1)])
    results.to_csv(f"/home/serenitas/edwin/Python/rv_{index}{series}.csv")
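
# Hedged usage sketch (not part of the original workflow): one way the CSV
# written by rv_calc1() could be reloaded and the 0-3 correlation comparison
# plotted. plot_rv_history is a hypothetical helper; the column names follow
# those written above, and matplotlib is assumed to be available.
def plot_rv_history(index='IG', series=29):
    import matplotlib.pyplot as plt

    results = pd.read_csv(f"/home/serenitas/edwin/Python/rv_{index}{series}.csv",
                          parse_dates=['date'], index_col=['date'])
    # TLP-mapped equity correlation vs the previous series' quoted correlation
    ax = results[['3_rho_tlp', '3_rho_ig27']].plot()
    ax.set_ylabel('rho')
    ax.set_title(f"{index}{series} 0-3 rho: TLP-mapped vs previous series")
    plt.show()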
def dispersion(trade_date):
    from quantlib.time.api import Schedule, Rule, Date, Period, WeekendsOnly
    from quantlib.settings import Settings

    curves = {}
    maturities = {}
    settings = Settings()
    for series in [24, 25, 26, 27, 28, 29]:
        index_temp = idx_bkt.MarkitBasketIndex('IG', series, ["5yr"],
                                               trade_date=trade_date)
        maturities[series] = index_temp.maturities[0]
        cds_schedule = Schedule.from_rule(settings.evaluation_date,
                                          Date.from_datetime(maturities[series]),
                                          Period('3M'), WeekendsOnly(),
                                          date_generation_rule=Rule.CDS2015)
        # Shift the numpy day serials onto the library's internal day count
        # (134774 days between 1601-01-01 and the 1970-01-01 epoch).
        sm, tickers = index_temp.survival_matrix(
            cds_schedule.to_npdates().view('int') + 134774)
        # Default probabilities per name, one column per schedule date.
        curves[series] = pd.DataFrame(1 - sm, index=tickers, columns=cds_schedule)
        #temp = (pd.to_datetime(maturities[series]) - datetime.datetime(1970,1,1)).days + 134774
        #curves[series] = pd.concat([c.to_series() for _,_, c in index_temp.items()], axis=1)
    curve_df = pd.concat(curves).stack()
    curve_df.index.rename(['series', 'maturity', 'name'], inplace=True)

    # Dispersion of the riskiest decile: coefficient of variation of the
    # default probabilities at each series' maturity.
    disp = {}
    for series in [24, 25, 26, 27, 28, 29]:
        temp = curve_df.xs((series, maturities[series].strftime('%Y-%m-%d')))
        temp = temp[pd.qcut(temp, 10, labels=False) == 9]
        disp[series] = temp.std() / temp.mean()
    dispersion = pd.concat(disp)
    curve_df.groupby(['series', 'maturity']).mean()
    curve_df.groupby(['series', 'maturity']).std()
    return dispersion


def scenarios(tranche, shock_range=None, roll_corr=False):
    from copy import deepcopy

    tranche.build_skew()
    orig_tranche_cl, _, orig_tranche_pv = tranche.tranche_pvs()
    if shock_range is None:
        shock, step = 1, 10
        shock_range = (1 + np.linspace(-0.3, shock, step)) * tranche.tranche_quotes.indexrefspread[0]

    # Pre-allocate one row per shock, one column per tranche.
    shock_index_pv_calc = np.empty(len(shock_range))
    shock_tranche_pv = np.empty((len(shock_range), tranche.K.size - 1))
    shock_tranche_delta = np.empty((len(shock_range), tranche.K.size - 1))
    shock_tranche_cl = np.empty((len(shock_range), tranche.K.size - 1))
    shock_tranche_carry = np.empty((len(shock_range), tranche.K.size - 1))

    results = pd.DataFrame()
    for shortened in [0, 1, 2]:
        temp_tranche = deepcopy(tranche)
        if shortened > 0:
            temp_tranche.cs = temp_tranche.cs[:-shortened]
        for i, shock in enumerate(shock_range):
            temp_tranche.tweak(shock)
            if roll_corr is True:
                temp_tranche.rho = tranche.map_skew(temp_tranche, 'TLP')
            shock_index_pv_calc[i] = temp_tranche._snacpv(
                shock * 1e-4, temp_tranche.coupon(temp_tranche.maturity),
                temp_tranche.recovery)
            shock_tranche_cl[i], _, shock_tranche_pv[i] = temp_tranche.tranche_pvs()
            shock_tranche_delta[i] = temp_tranche.tranche_deltas()['delta']
            shock_tranche_carry[i] = temp_tranche.tranche_quotes.running
        temp1 = pd.DataFrame(shock_tranche_pv, index=shock_range,
                             columns=[s + "_pv" for s in tranche._row_names])
        temp2 = pd.DataFrame(shock_tranche_delta, index=shock_range,
                             columns=[s + "_delta" for s in tranche._row_names])
        temp3 = pd.DataFrame(np.subtract(shock_tranche_pv, orig_tranche_pv),
                             index=shock_range,
                             columns=[s + "_pnl" for s in tranche._row_names])
        temp4 = pd.DataFrame(shock_index_pv_calc, index=shock_range,
                             columns=['index_price_snacpv'])
        temp5 = pd.DataFrame(shock_tranche_carry, index=shock_range,
                             columns=[s + "_carry" for s in tranche._row_names])
        #temp5 = pd.DataFrame(np.subtract(shock_tranche_cl, orig_tranche_cl), index=shock_range,
        #                     columns=[s + "_coupon_pnl" for s in tranche._row_names])
        df = pd.concat([temp1, temp2, temp3, temp4, temp5], axis=1)
        if shortened > 0:
            df['days'] = ((tranche.cs.index[-1] - tranche.cs.index[-shortened-1])
                          / np.timedelta64(1, 'D')).astype(int)
        else:
            df['days'] = 0
        # Scale the running coupon into carry earned over the shortened horizon.
        for column in [s + "_carry" for s in tranche._row_names]:
            df[column] *= df['days'] / 365
        results = pd.concat([results, df])
    return results
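
# Hedged usage sketch (assumption: not in the original module): running the
# scenario grid above on a single tranche basket and pivoting the equity PnL
# into a (spread, days) table. run_tranche_example is a hypothetical helper;
# the '0-3_pnl' column name assumes '0-3' is the first entry of _row_names,
# as in run_scen() below.
def run_tranche_example(trade_date):
    tranche = bkt.TrancheBasket('IG', 29, '5yr', trade_date=trade_date)
    # roll the correlation skew with the spread shocks via TLP mapping
    scen = scenarios(tranche, roll_corr=True)
    scen.index.name = 'spread'
    # rows: shocked index spread; columns: days of time decay
    return scen.reset_index().pivot_table(index='spread', columns='days',
                                          values='0-3_pnl')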
def run_scen(trade_date=pd.Timestamp.today().normalize() - pd.offsets.BDay()):
    option_delta = Index.from_tradeid(910)
    option1 = BlackSwaption.from_tradeid(13, option_delta)
    option2 = BlackSwaption.from_tradeid(12, option_delta)
    portf = Portfolio([option1, option2, option_delta])

    # Start with the swaptions.
    portf.reset_pv()
    portf.mark()
    earliest_date = min(portf.swaptions, key=lambda x: x.exercise_date).exercise_date
    #date_range = pd.bdate_range(portf.indices[0].trade_date, earliest_date - BDay(), freq='3B')
    date_range = pd.date_range(trade_date, periods=4, freq='5B')
    vol_shock = np.arange(-0.01, 0.01, 0.01)
    shock_min = -0.3
    shock_max = 0.8
    spread_shock = np.arange(shock_min, shock_max, 0.05)
    index = portf.indices[0].name.split()[1]
    series = portf.indices[0].name.split()[3][1:]
    vs = BlackSwaptionVolSurface(index, series, trade_date=trade_date)
    vol_surface = vs[vs.list(option_type='payer')[-1]]
    df = run_portfolio_scenarios(portf, date_range, spread_shock, vol_shock,
                                 vol_surface, params=["pnl", "delta"])
    df = df[df.vol_shock == 0]
    df['days'] = ((df.index - trade_date) / np.timedelta64(1, 'D')).astype(int)

    # Now do the tranches.
    tranche = bkt.TrancheBasket('IG', 29, '5yr', trade_date=trade_date)
    shock_range = (1 + spread_shock) * portf.indices[0].spread
    results = scenarios(tranche, shock_range)
    notional = 10000000
    # Long 1x 0-3 vs short 6x 7-15.
    results['delta'] = -notional * (results['0-3_delta'] - 6 * results['7-15_delta'])
    results['pnl'] = notional * (results['0-3_pnl'] + results['0-3_carry']
                                 - 6 * (results['7-15_pnl'] + results['7-15_carry']))
    results['date'] = tranche.trade_date + results.days * pd.offsets.Day()
    results.index.name = 'spread'

    # Now combine the results: interpolate the tranche PnL onto the swaption
    # scenario dates, one spread shock at a time.
    f = {}
    for i, g in results.groupby('spread'):
        f[i] = interp1d(g.days, g.pnl)
    df['total_pnl'] = df.apply(lambda row: f[row.spread](row.days), axis=1)
    df.total_pnl = df.total_pnl.astype(float)
    return results, df, shock_range


def plot_pnl():
    a, b, shock_range = run_scen()
    a.reset_index(inplace=True)
    a.set_index('date', inplace=True)
    # Plot tranche-only PnL.
    plot_time_color_map(a, shock_range, attr="pnl")
    # Plot swaption-only PnL.
    plot_time_color_map(b, shock_range, attr="pnl")
    # Plot combined tranche and swaption PnL.
    plot_time_color_map(b, shock_range, attr="total_pnl")
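
# Hedged entry point (assumption: not in the original script): regenerate the
# RV history and render the PnL color maps when run directly.
if __name__ == "__main__":
    rv_calc1()
    plot_pnl()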