from index_data import get_index_quotes, index_returns, _serenitas_engine
from analytics import Swaption, BlackSwaption, Index, VolatilitySurface, Portfolio
from db import dbengine, dbconn
from pandas.tseries.offsets import BDay
import numpy as np
import pandas as pd
import math
import datetime

dawndb = dbengine('dawndb')
serenitasdb = dbengine('serenitasdb')

# shock quantiles: .95 = ~2 st dev tightening, .05 = widening
SHOCK_VALUES = [.95, .05]


def get_spreads():
    ## look at returns
    index, df, df1, returns = {}, {}, {}, {}
    shock = []
    for ind in ['HY', 'IG']:
        index[ind] = Index.from_name(ind, 28, '5yr')
        df[ind] = index_returns(index=ind, series=[23, 24, 25, 26, 27, 28],
                                tenor=['3yr', '5yr', '7yr', '10yr'], per=22)
        #df1[ind] = get_index_quotes(index=ind, series=[23, 24, 25, 26, 27, 28], tenor=['3yr', '5yr', '7yr', '10yr'])
        returns[ind] = (df[ind].price_return.unstack(-1).dropna()
                        .groupby(level='date').nth(-1))
        for percent in SHOCK_VALUES:
            shock.append([ind, percent, returns[ind].quantile(percent)['5yr']])
    shock = pd.DataFrame.from_records(shock, columns=['index', 'percent', 'shock'],
                                      index=['index', 'percent'])
    # set current market levels on the on-the-run index objects
    index['IG'].spread = 62
    index['HY'].price = 107.5
    return index, shock


def get_pos(index, shock):
    #get position by strategy folder
    trade_date = datetime.date.today() - BDay(1)
    df = pd.read_sql_query(
        "SELECT security_id, notional * case when protection = 'Buyer' then -1 else 1 end as notional, "
        "maturity, folder FROM cds where trade_date < %s and termination_amount is NULL",
        dawndb, params=(trade_date,), parse_dates=['maturity'])
    df = df.groupby(['security_id', 'maturity', 'folder']).agg('sum')
    df = df[df.notional != 0].reset_index()
    df1 = pd.read_sql_query(
        "SELECT redindexcode as security_id, index, series, indexfactor "
        "FROM index_version where lastdate = 'infinity'", serenitasdb)
    df2 = pd.read_sql_query("SELECT * from index_maturity", serenitasdb,
                            parse_dates=['maturity'])
    df3 = pd.read_sql_query("SELECT index, series, tenor, duration "
                            "FROM index_quotes where date = %s",
                            dawndb, params=(trade_date,))
    df = df.merge(df1, on='security_id')
    df = df.merge(df2, how='inner', on=['index', 'maturity', 'series'])
    df = df.merge(df3, on=['index', 'series', 'tenor'])
    # scale each position to an equivalent on-the-run notional
    df['ontr_notional'] = df.apply(
        lambda row: row.notional * row.indexfactor / 100 * row.duration /
        (index[row['index']].DV01 / index[row['index']].notional * 10000), axis=1)
    # apply the quantile spread shocks from get_spreads
    for percent in SHOCK_VALUES:
        df[percent] = df.apply(
            lambda row: row['ontr_notional'] * shock.loc[(row['index'], percent), 'shock'],
            axis=1)
    df1 = df.groupby('folder')[SHOCK_VALUES].sum()
    return df1.min(axis=1)


def on_the_run(index):
    r = _serenitas_engine.execute("SELECT max(series) FROM index_version WHERE index=%s",
                                  (index,))
    series, = r.fetchone()
    return series


def rel_spread_diff(index='HY', rolling=10):
    otr = on_the_run(index)
    ## look at spreads
    df = get_index_quotes(index, list(range(otr - rolling, otr + 1)),
                          tenor=['3yr', '5yr', '7yr', '10yr'], years=5)
    duration = df.duration.xs(('5yr', 28), level=['tenor', 'series']).iloc[-1]
    df = df.xs('5yr', level='tenor')['closespread'].groupby(['date', 'series']).last()
    df = df.loc['2013-01-15':]
    curr_spread = df.iloc[-1]
    df = df.unstack(-1).pct_change(freq='22B').stack()
    return df.groupby('date').last(), curr_spread, duration


for x, y in stress.iteritems(): portf.ref =
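

# Usage sketch (an assumption, not part of the original file): get_spreads() builds
# the on-the-run index objects and the quantile shock table, which get_pos() then
# applies to the live CDS positions, returning the worse of the two shock scenarios
# per strategy folder.
if __name__ == '__main__':
    otr_index, shock_table = get_spreads()
    folder_stress = get_pos(otr_index, shock_table)
    print(folder_stress)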