 python/reto.py | 81 ++++++++++++++++++++++++++++++++++++++++++++---------
 1 file changed, 72 insertions(+), 9 deletions(-)
diff --git a/python/reto.py b/python/reto.py
index d6114583..7ba2bb86 100644
--- a/python/reto.py
+++ b/python/reto.py
@@ -1,18 +1,24 @@
 import datetime
+import argparse
+import warnings
+import logging
+from copy import deepcopy
 import pandas as pd
 import numpy as np
-import argparse
 from risk.portfolio import build_portfolio, generate_vol_surface
 import serenitas.analytics as ana
 from serenitas.analytics.scenarios import run_portfolio_scenarios
 from serenitas.analytics.base import Trade
-from serenitas.utils.db2 import dbconn
+from serenitas.analytics.index_data import load_all_curves
 from serenitas.analytics.dates import prev_business_day
+from serenitas.utils.db2 import dbconn
+from serenitas.utils.pool import serenitas_pool
 
 
 def gen_shocks(portf, shock_date, fund):
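+    # work on a copy: the shock run mutates trade state, and __main__ reuses
+    # the original portfolio for the JTD run afterwards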
+    portf = deepcopy(portf)
     Trade.init_ontr(shock_date)
     ana._local = False
     ontr_spread = Trade._ontr["HY"].spread
@@ -77,7 +83,7 @@ def gen_shocks(portf, shock_date, fund):
     return results
 
 
-def save_shocks(date, df, fund):
+def save_shocks(conn, date, df, fund):
     with conn.cursor() as c:
         c.execute(
             "DELETE FROM shocks WHERE fund=%s AND date=%s",
@@ -87,15 +93,52 @@ def save_shocks(date, df, fund):
             ),
         )
         conn.commit()
-    with conn.cursor() as c:
         c.executemany(
             "INSERT INTO shocks VALUES (%s, %s, %s, %s, %s, %s)",
             [(*t, fund) for t in df.itertuples(index=False)],
         )
+        conn.commit()
+
+
+def get_survival_curves(date):
+    with serenitas_pool.connection() as conn:
+        surv_curves = load_all_curves(conn, date)
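+        # credit-triangle approximation: running spread ≈ hazard × (1 − recovery),
+        # read at curve node 5 (assumed here to be the 5y point)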
+        surv_curves["spread"] = surv_curves["curve"].apply(
+            lambda sc: sc.inspect()["data"][5][1] * (1 - sc.recovery_rates[5])
+        )
+    return surv_curves.groupby(level=0).first()[["name", "company_id", "spread"]]
+
+
+def gen_jtd(portf, survival_curves):
+    portf = deepcopy(portf)
+    jtd = portf.jtd_single_names()
+    jtd = jtd[[jtd.columns[0]]].join(survival_curves)
+    jtd.columns = ["jtd", "name", "company_id", "5yr_spread"]
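+    # several positions can reference the same issuer, so aggregate the JTD
+    # per (company_id, name) before uploading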
+    jtd = jtd.groupby(["company_id", "name"]).sum()
+    return jtd.reset_index()
+
+
+def save_jtd(conn, date, df, fund):
+    with conn.cursor() as c:
+        c.execute(
+            "DELETE FROM jtd_risks WHERE fund=%s AND date=%s",
+            (
+                fund,
+                date,
+            ),
+        )
+        conn.commit()
+        c.executemany(
+            "INSERT INTO jtd_risks VALUES (%s, %s, %s, %s, %s, %s)",
+            [(date, fund, *t) for t in df.itertuples(index=False)],
+        )
+        conn.commit()
 
 
 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(description="Shock data and insert into DB")
+    parser = argparse.ArgumentParser(
+        description="Shock data / calculate JTD and insert into DB"
+    )
     parser.add_argument(
         "date",
         nargs="?",
@@ -105,7 +148,27 @@ if __name__ == "__main__":
     parser.add_argument("-n", "--no-upload", action="store_true", help="do not upload")
     args = parser.parse_args()
     conn = dbconn("dawndb")
-    for fund in ("SERCGMAST", "BOWDST", "ISOSEL", "BRINKER"):
-        portf, _ = build_portfolio(args.date, args.date, fund)
-        shocks = gen_shocks(portf, args.date, fund)
-        save_shocks(args.date, shocks, fund)
+    survival_curves = get_survival_curves(args.date)
+    logging.basicConfig(
+        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
+        level=logging.INFO,
+    )
+    logger = logging.getLogger(__name__)
+    for fund in (
+        "SERCGMAST",
+        "BOWDST",
+        "ISOSEL",
+        "BRINKER",
+    ):
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "ignore", message="pandas only supports SQLAlchemy connectable"
+            )
+            warnings.filterwarnings("ignore", message="skipped 1 empty curves")
+            portf, _ = build_portfolio(args.date, args.date, fund)
+            shocks = gen_shocks(portf, args.date, fund)
+            save_shocks(conn, args.date, shocks, fund)
+            logger.info(f"{args.date}: {fund} Shocks Done")
+            jtd = gen_jtd(portf, survival_curves)
+            save_jtd(conn, args.date, jtd, fund)
+            logger.info(f"{args.date}: {fund} JTD Done")