"""Calibrate tranche risk numbers and append them to the ``tranche_risk`` table.

Runs the ``TrancheBasket`` calibration either for a single index/tenor given on
the command line (``--index`` / ``--tenor``) or for a list of (index, tenor)
pairs read from a YAML config file (``--config``), over a business-day range
ending at ``--until``, and persists one row per tranche per date.
"""
from db import dbconn, dbengine
from analytics import TrancheBasket
from pandas.tseries.offsets import BDay
import datetime
import logging
import os
import pandas as pd
from pathlib import Path
# FIX: `yaml.load` without a Loader is unsafe on untrusted input and raises
# TypeError on PyYAML >= 6 — use safe_load for plain config files.
from yaml import safe_load
from pickle import load as pload  # NOTE(review): appears unused here — confirm before removing
import argparse


def get_lastdate(conn, index, series, tenor):
    """Return the day after the latest stored risk date for (index, series, tenor).

    Parameters
    ----------
    conn : DB-API connection (psycopg2-style; query uses ``%s`` placeholders)
    index : str, e.g. ``"IG"``
    series : int, e.g. ``31``
    tenor : str, e.g. ``"5yr"``

    Returns
    -------
    datetime.date or None
        ``max(date) + 1`` from ``risk_numbers_new``, or None when no rows match.
    """
    sql_str = ("SELECT max(date)::date + 1 AS date FROM risk_numbers_new "
               "WHERE index=%s and series = %s and tenor = %s")
    with conn.cursor() as c:
        c.execute(sql_str, (index, series, tenor))
        date, = c.fetchone()
    conn.commit()
    return date


def _parse_args():
    """Build and parse the command-line interface."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-u", "--update", action="store_true", default=False,
                        # FIX: argparse interpolates %(default)s, not optparse's
                        # %default (the original printed the literal "%default").
                        help="Update from the last run date [default %(default)s]")
    parser.add_argument("-c", "--config", metavar="config_file",
                        help="Runs the list of indices provided in CONFIG_FILE")
    parser.add_argument("-i", "--index", help="Index name we want to run")
    parser.add_argument("--tenor", default="5yr",
                        help="Tenor we want to run [default '5yr']")
    parser.add_argument("--until", default=pd.Timestamp.now() - BDay(),
                        type=pd.Timestamp,
                        help="Last date to calibrate [default: previous business day]")
    return parser.parse_args()


def main():
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logger = logging.getLogger('tranche_calib')
    logger.setLevel(logging.INFO)

    args = _parse_args()
    CODE_DIR = Path(os.environ['CODE_DIR'])

    # First calibration date available for each series.
    start_dates = {'ig19': datetime.date(2013, 5, 1),
                   'ig21': datetime.date(2013, 9, 26),
                   'ig23': datetime.date(2014, 10, 14),
                   'ig25': datetime.date(2015, 9, 22),
                   'ig27': datetime.date(2016, 9, 27),
                   'ig29': datetime.date(2017, 9, 26),
                   'ig31': datetime.date(2018, 9, 25)}

    serenitas_engine = dbengine('serenitasdb')
    serenitas_conn = serenitas_engine.raw_connection()

    if args.config is None:
        if args.index is None:
            raise ValueError("Please provide an index to run")
        # FIX: 'runs' must be a list of (index, tenor) PAIRS. The original flat
        # list [args.index, args.tenor] made the loop below unpack each string
        # character-wise (e.g. "5yr" -> 3 values -> ValueError).
        config = {'runs': [(args.index, args.tenor)]}
    else:
        with (CODE_DIR / "etc" / args.config).open("r") as fh:
            config = safe_load(fh)

    for index, tenor in config['runs']:
        # Run names look like "ig31": 2-letter index code + series number.
        index, series = index[:2].upper(), int(index[2:])
        begin_date = None
        if args.update:
            begin_date = get_lastdate(serenitas_conn, index, series, tenor)
        if not args.update or begin_date is None:
            try:
                begin_date = start_dates[f"{index.lower()}{series}"]
            except KeyError:
                logger.warning("no start date for %s %s, skipping", index, series)
                continue

        dr = pd.bdate_range(begin_date, args.until)
        if dr.empty:
            continue
        logger.info("calibrating %s, %s, %s", index, series, tenor)

        tranche_index = None
        data = {}
        for d in dr:
            try:
                if tranche_index is None:
                    # Build the basket once, then just roll the value date.
                    tranche_index = TrancheBasket(index, series, tenor,
                                                  value_date=d.date())
                else:
                    tranche_index.value_date = d.date()
            except ValueError as e:
                logger.warning(e)
                continue
            logger.debug(d.date())
            tranche_index.tweak()
            try:
                tranche_index.build_skew()
            except ValueError as e:
                logger.error(e)
                continue

            # One row per tranche: deltas/thetas/etc. concatenated column-wise.
            df = pd.concat([tranche_index.tranche_deltas(),
                            tranche_index.tranche_thetas(),
                            tranche_index.tranche_fwd_deltas(),
                            tranche_index.tranche_durations(),
                            tranche_index.tranche_EL(),
                            tranche_index.tranche_spreads()], axis=1)
            df['index_duration'], df['index_expected_loss'], df['index_price'] = \
                tranche_index.index_pv()
            df['index_expected_loss'] *= -1
            df['index_duration'] -= tranche_index.accrued()
            df['index_basis'] = tranche_index.tweaks[0]
            df['index_theta'] = tranche_index.theta()[tenor]
            df['tranche_id'] = tranche_index.tranche_quotes.id.values
            # rho[0] is the 0% attachment point; detachment correlations start at 1.
            df['corr_at_detach'] = tranche_index.rho[1:]
            df['corr01'] = tranche_index.tranche_corr01()
            del df['fwd_gamma']
            data[d] = df

        # FIX: pd.concat raises ValueError on an empty dict — skip runs where
        # every date failed to calibrate instead of crashing the whole job.
        if data:
            pd.concat(data).to_sql("tranche_risk", serenitas_engine,
                                   if_exists='append', index=False)


if __name__ == "__main__":
    main()