Diffstat (limited to 'python/exploration/test_cms.py')
-rw-r--r--  python/exploration/test_cms.py  110
1 file changed, 94 insertions(+), 16 deletions(-)
diff --git a/python/exploration/test_cms.py b/python/exploration/test_cms.py
index 31c96029..617ef53e 100644
--- a/python/exploration/test_cms.py
+++ b/python/exploration/test_cms.py
@@ -1,30 +1,108 @@
-from analytics.cms_spread import CmsSpread
+from analytics.cms_spread import CmsSpread, _dawndb
+from db import dbconn
+import datetime
 from pathlib import Path
+from quantlib.time.api import Date, Days, UnitedStates
+from scipy.optimize import brentq
 import os
 import pandas as pd
+import re
 DAILY_DIR = Path(os.environ["DAILY_DIR"])
-r = []
-today = pd.Timestamp.today()
-trade = CmsSpread.from_tradeid(1)
-dr = pd.bdate_range("2018-01-19", today, closed="left", normalize=True)
-for d in dr:
-    trade.value_date = d
-    r.append(trade.pv)
-def gs_navs(trade_id='LTAAB4ZN3333L6TTSH7.0.0.0'):
+def lmcg_navs(trade_id):
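+    # reprice the trade on each business day from its original value date up to (but excluding) today, with corr pinned at 0.72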
+    r = []
+    today = pd.Timestamp.today()
+    trade = CmsSpread.from_tradeid(trade_id)
+    trade.corr = 0.72
+    dr = pd.bdate_range(trade.value_date, today, closed="left", normalize=True)
+    for d in dr:
+        trade.value_date = d
+        r.append(trade.pv)
+    return pd.Series(r, dr)
+
+
+def gs_navs(trade_id, orig_nav):
     # load gs navs for a given trade
     dates = []
     r = []
+    cal = UnitedStates()
     for fname in (DAILY_DIR / "GS_reports").glob("Trade_Detail*.xls"):
-        m = re.match("[^\d]*(\d{2}_.{3}_\d{4})", fname.name)
-        if m:
-            date_string, = m.groups()
-            dates.append(datetime.datetime.strptime(date_string, "%d_%b_%Y"))
         df = pd.read_excel(fname, skiprows=9, skipfooter=77)
-        r.append(df.set_index('Trade Id').loc[trade_id, 'NPV (USD)'])
+        df = df.set_index('Trade Id')
+        try:
+            npv = df.loc[trade_id, 'NPV (USD)']
+            trade_date = Date.from_datetime(pd.Timestamp(df.loc[trade_id, 'Trade Date']))
+        except KeyError:
+            continue
+        else:
+            m = re.match(r"[^\d]*(\d{2}_.{3}_\d{4})", fname.name)
+            if m:
+                date_string, = m.groups()
+                date = datetime.datetime.strptime(date_string, "%d_%b_%Y")
+                current_date = Date.from_datetime(date)
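+                # on marks dated before the trade's T+2 settlement, back the upfront premium (orig_nav) out of the reported NPV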
+                if cal.advance(trade_date, 2, Days) > current_date:
+                    npv -= orig_nav
+                dates.append(date)
+                r.append(npv)
+
     s = pd.Series(r, dates)
     s = s.sort_index()
-    #remove the IA until it settled
-    s[:2] -= 68750.00
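+    # sign flip: the reported NPV is presumably quoted from the counterparty's side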
     return -s
+
+
+def ms_navs(trade_id="JWY3N"):
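+    # load MS NAVs (exposure in report ccy) for a given trade from the daily Trade_Detail reports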
+    dates = []
+    r = []
+    for fname in (DAILY_DIR / "MS_reports").glob("Trade_Detail*.xls"):
+        df = pd.read_excel(fname)
+        try:
+            npv = df.set_index('trade_id').loc[trade_id, 'exposure_in_rpt_ccy']
+        except KeyError:
+            continue
+        else:
+            m = re.match(r"[^\d]*(\d{8})", fname.name)
+            if m:
+                date_string, = m.groups()
+                dates.append(datetime.datetime.strptime(date_string, "%Y%m%d"))
+                r.append(npv)
+    s = pd.Series(r, dates)
+    s = s.sort_index()
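+    # the first two marks appear to pre-date settlement of the upfront amount, so add it back (cf. the old IA adjustment for GS above)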
+    s[:2] += 105_000
+    return s
+
+
+if __name__ == "__main__":
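+    # map each capfloor facing GOLDNY: internal id -> (counterparty trade id, upfront premium)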
+    with _dawndb.cursor() as c:
+        c.execute("SELECT id, cpty_id, amount * premium_percent / 100 AS orig_nav "
+                  "FROM capfloors WHERE cp_code='GOLDNY'")
+        gs_mapping = {r['id']: (r['cpty_id'], r['orig_nav']) for r in c}
+    # s1 = lmcg_navs(3)
+    # s2 = gs_navs(d[3])
+    # df = pd.concat([s1, s2], axis=1)
+    # s2 = lmcg_navs(4)
+    # s3 = ms_navs()
+    # df = pd.concat([s2, s3], axis=1)
+    # s = gs_navs()
+    # s2 = lmcg_navs(1)
+    # df = pd.concat([s, s2], axis=1)
+    # for gs_id in gs_mapping.values():
+    #     s = gs_navs(gs_id)
+    #     with dbconn('dawndb') as conn:
+    #         with conn.cursor() as c:
+    #             for d, val in s.items():
+    #                 c.execute("INSERT INTO external_marks_deriv "
+    #                           "VALUES(%s, %s, %s)",
+    #                           (d, gs_id, val))
+
+    # compute implied correlation from external mark
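+    # for each mark date, solve trade.pv(corr) = GS NAV for corr via Brent's method on [0.5, 0.99]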
+    s = gs_navs(*gs_mapping[1])
+    trade = CmsSpread.from_tradeid(1)
+    def myfun(corr, args):
+        trade.corr = corr
+        return trade.pv - args
+    rho = []
+    for d in s.index:
+        trade.value_date = d
+        x0 = brentq(myfun, 0.5, 0.99, (s[d],))
+        rho.append(x0)
+    pd.Series(rho, s.index).plot()