Diffstat (limited to 'python')
-rw-r--r--  python/calibrate_tranches_BC.py | 107
1 file changed, 83 insertions(+), 24 deletions(-)
diff --git a/python/calibrate_tranches_BC.py b/python/calibrate_tranches_BC.py
index 7b55ae38..bab84901 100644
--- a/python/calibrate_tranches_BC.py
+++ b/python/calibrate_tranches_BC.py
@@ -1,46 +1,104 @@
from db import dbconn, dbengine
from analytics import TrancheBasket
+from pandas.tseries.offsets import BDay
import datetime
+import logging
+import os
import pandas as pd
+from pathlib import Path
from yaml import load
+from pickle import load as pload
+import argparse
-start_dates = {'ig19': datetime.date(2013, 5, 1),
- 'ig21': datetime.date(2013, 9, 26),
- 'ig23': datetime.date(2014, 10, 14),
- 'ig25': datetime.date(2015, 9, 22),
- 'ig27': datetime.date(2016, 9, 27),
- 'ig29': datetime.date(2017, 9, 26),
- 'ig31': datetime.date(2018, 9, 25)}
+def get_lastdate(conn, index, series, tenor):
+ sql_str = ("SELECT max(date)::date + 1 AS date FROM risk_numbers_new "
+ "WHERE index=%s and series = %s and tenor = %s")
+ with conn.cursor() as c:
+ c.execute(sql_str, (index, series, tenor))
+ date, = c.fetchone()
+ conn.commit()
+ return date
-serenitas_engine = dbengine('serenitasdb')
+if __name__ == "__main__":
+ logging.basicConfig(level=logging.INFO,
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+ logger = logging.getLogger('tranche_calib')
+ logger.setLevel(logging.INFO)
+ parser = argparse.ArgumentParser()
+ parser.add_argument("-u", "--update", action="store_true", default=False,
+                        help="Update from the last run date [default: %(default)s]")
+ parser.add_argument("-c", "--config", metavar="config_file",
+ help="Runs the list of indices provided in CONFIG_FILE")
+ parser.add_argument("-i", "--index", help="Index name we want to run")
+ parser.add_argument("--tenor", default="5yr", help="Tenor we want to run [default '5yr']")
+ parser.add_argument("--until", default=pd.Timestamp.now()-BDay(),
+ type=lambda s: pd.Timestamp(s))
+ args = parser.parse_args()
+ CODE_DIR = Path(os.environ['CODE_DIR'])
-with open("/home/guillaume/projects/code/etc/runs.yml") as fh:
- runs = load(fh)['runs']
- for index, tenor in runs:
- print(index, tenor)
- if index not in start_dates:
- continue
- else:
- begin_date = start_dates[index]
+ start_dates = {'ig19': datetime.date(2013, 5, 1),
+ 'ig21': datetime.date(2013, 9, 26),
+ 'ig23': datetime.date(2014, 10, 14),
+ 'ig25': datetime.date(2015, 9, 22),
+ 'ig27': datetime.date(2016, 9, 27),
+ 'ig29': datetime.date(2017, 9, 26),
+ 'ig31': datetime.date(2018, 9, 25)}
+
+ serenitas_engine = dbengine('serenitasdb')
+ serenitas_conn = serenitas_engine.raw_connection()
+ if args.config is None:
+ if args.index is None:
+ raise ValueError("Please provide an index to run")
+        config = {'runs': [(args.index, args.tenor)]}
+ else:
+ with (CODE_DIR / "etc" / args.config).open("r") as fh:
+ config = load(fh)
+
+ for index, tenor in config['runs']:
index, series = index[:2].upper(), int(index[2:])
- tranche_index = TrancheBasket(index, series, tenor)
- dr = pd.bdate_range(begin_date, "2018-10-10")
+ if args.update:
+ begin_date = get_lastdate(serenitas_conn, index, series, tenor)
+ if not args.update or begin_date is None:
+ try:
+ begin_date = start_dates[f"{index.lower()}{series}"]
+ except KeyError:
+            logger.warning(f"no start date for {index}{series}")
+ continue
+
+ dr = pd.bdate_range(begin_date, args.until)
+ if dr.empty:
+ continue
+ logger.info(f"calibrating {index}, {series}, {tenor}")
+ tranche_index = None
+
+ data = {}
for d in dr:
- print(d)
try:
- tranche_index.value_date = d
+ if tranche_index is None:
+ tranche_index = TrancheBasket(index, series, tenor, value_date=d.date())
+ else:
+ tranche_index.value_date = d.date()
except ValueError as e:
- print(e)
+ logger.warning(e)
continue
+
+ logger.debug(d.date())
+
tranche_index.tweak()
- tranche_index.build_skew()
+ try:
+ tranche_index.build_skew()
+ except ValueError as e:
+ logger.error(e)
+ continue
+
df = pd.concat([tranche_index.tranche_deltas(),
tranche_index.tranche_thetas(),
tranche_index.tranche_fwd_deltas(),
tranche_index.tranche_durations(),
tranche_index.tranche_EL(),
tranche_index.tranche_spreads()], axis=1)
- df['index_duration'], df['index_expected_loss'], df['index_price'] = tranche_index.index_pv()
+ df['index_duration'], df['index_expected_loss'], df['index_price'] = \
+ tranche_index.index_pv()
df['index_expected_loss'] *= -1
df['index_duration'] -= tranche_index.accrued()
df['index_basis'] = tranche_index.tweaks[0]
@@ -49,4 +107,5 @@ with open("/home/guillaume/projects/code/etc/runs.yml") as fh:
df['corr_at_detach'] = tranche_index.rho[1:]
df['corr01'] = tranche_index.tranche_corr01()
del df['fwd_gamma']
- df.to_sql("tranche_risk", serenitas_engine, if_exists='append', index=False)
+ data[d] = df
+ pd.concat(data).to_sql("tranche_risk", serenitas_engine, if_exists='append', index=False)
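
For reference, hypothetical invocations of the reworked script (assuming CODE_DIR points at the repository root and etc/runs.yml contains a 'runs' list of [index, tenor] pairs; file and index names are illustrative only):

    python python/calibrate_tranches_BC.py --config runs.yml --update
    python python/calibrate_tranches_BC.py --index ig31 --tenor 5yr --until 2018-10-10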