from index_data import get_index_quotes, index_returns
from analytics import Swaption, BlackSwaption, Index, VolatilitySurface, Portfolio
from db import dbengine, dbconn
from pandas.tseries.offsets import BDay
import pandas as pd
import math
import datetime

dawndb = dbengine('dawndb')
serenitasdb = dbengine('serenitasdb')

## look at returns
# Historical price returns for CDX HY and IG, used to size the spread shocks.
index, df, df1, returns = {}, {}, {}, {}
shock = []
shock_values = [.95, .05]  # .95 = ~2 st dev tightening, .05 = widening
for ind in ['HY', 'IG']:
    index[ind] = Index.from_name(ind, 28, '5yr')
    df[ind] = index_returns(index=ind, series=[23, 24, 25, 26, 27, 28],
                            tenor=['3yr', '5yr', '7yr', '10yr'], per=22)
    #df1[ind] = get_index_quotes(index=ind, series=[23, 24, 25, 26, 27, 28],
    #                            tenor=['3yr', '5yr', '7yr', '10yr'])
    # keep the on-the-run return observation for each date
    returns[ind] = df[ind].price_return.unstack(-1).dropna().groupby(level='date').nth(-1)
    for percent in shock_values:
        shock.append([ind, percent, returns[ind].quantile(percent)['5yr']])
shock = pd.DataFrame.from_records(shock, columns=['index', 'percent', 'shock'],
                                  index=['index', 'percent'])

# current market levels for the on-the-run indices
index['IG'].spread = 62
index['HY'].price = 107.5

# get position by strategy folder
trade_date = datetime.date.today() - BDay(2)
df = pd.read_sql_query(
    "SELECT security_id, notional * case when protection = 'Buyer' then -1 else 1 end as notional, "
    "maturity, folder FROM cds WHERE trade_date < %s",
    dawndb, params=(trade_date,), parse_dates=['maturity'])
df = df.groupby(['security_id', 'maturity', 'folder']).sum()
df = df[df.notional != 0].reset_index()

# map security_id to index/series/factor and tenor, and pull durations as of the trade date
df1 = pd.read_sql_query(
    "SELECT redindexcode AS security_id, index, series, indexfactor "
    "FROM index_version WHERE lastdate = 'infinity'", serenitasdb)
df2 = pd.read_sql_query("SELECT * FROM index_maturity", serenitasdb, parse_dates=['maturity'])
df3 = pd.read_sql_query(
    "SELECT index, series, tenor, duration FROM index_quotes WHERE date = %s",
    dawndb, params=(trade_date,))
df = df.merge(df1, on='security_id')
df = df.merge(df2, how='inner', on=['index', 'maturity', 'series'])
df = df.merge(df3, on=['index', 'series', 'tenor'])

# duration-weighted equivalent notional in the on-the-run 5yr index
df['ontr_notional'] = df.apply(
    lambda row: row.notional * row.indexfactor / 100 * row.duration /
    (index[row['index']].DV01 / index[row['index']].notional * 10000), axis=1)

# apply the tightening/widening shocks and keep the worst case per strategy folder
for percent in shock_values:
    df[percent] = df.apply(
        lambda row: row['ontr_notional'] * shock.loc[(row['index'], percent), 'shock'], axis=1)
df1 = df.groupby('folder').sum()[shock_values]
df1 = df1.min(axis=1)