about summary refs log tree commit diff stats
path: root/python
diff options
context:
space:
mode:
Diffstat (limited to 'python')
-rw-r--r-- python/reto.py | 43
1 files changed, 19 insertions, 24 deletions
diff --git a/python/reto.py b/python/reto.py
index bc6f978b..1f7bddd5 100644
--- a/python/reto.py
+++ b/python/reto.py
@@ -1,7 +1,3 @@
-import datetime
-import argparse
-import warnings
-import logging
import pandas as pd
import numpy as np
@@ -11,9 +7,6 @@ import serenitas.analytics as ana
from serenitas.analytics.scenarios import run_portfolio_scenarios
from serenitas.analytics.base import Trade
from serenitas.analytics.index_data import load_all_curves
-from serenitas.analytics.dates import prev_business_day
-from serenitas.utils.db2 import dbconn
-from serenitas.utils.pool import serenitas_pool
def gen_shocks(portf, shock_date, fund):
@@ -97,21 +90,19 @@ def save_shocks(conn, date, df, fund):
conn.commit()
-def get_survival_curves(date):
- with serenitas_pool.connection() as conn:
- surv_curves = load_all_curves(conn, date)
+def get_survival_curves(conn, date):
+ surv_curves = load_all_curves(conn, date)
surv_curves["spread"] = surv_curves["curve"].apply(
- lambda sc: sc.inspect()["data"][5][1] * (1 - sc.recovery_rates[5])
+ lambda sc: [h for d, h in sc][5] * (1 - sc.recovery_rates[5])
)
return surv_curves.groupby(level=0).first()[["name", "company_id", "spread"]]
def gen_jtd(portf, survival_curves):
jtd = portf.jtd_single_names()
- jtd = jtd[[jtd.columns[0]]].join(survival_curves)
- jtd.columns = ["jtd", "name", "company_id", "5yr_spread"]
- jtd = jtd.groupby(["company_id", "name"]).sum()
- return jtd.reset_index()
+ jtd = survival_curves.join(jtd.iloc[:, 0], how="right")
+ jtd.columns = ["name", "company_id", "5yr_spread", "jtd"]
+ return jtd.groupby(["company_id", "name"], as_index=False).sum()
def save_jtd(conn, date, df, fund):
@@ -125,13 +116,20 @@ def save_jtd(conn, date, df, fund):
)
conn.commit()
c.executemany(
- "INSERT INTO jtd_risks VALUES (%s, %s, %s, %s, %s, %s)",
+ "INSERT INTO jtd_risks(date, fund, company_id, name, five_year_spread, jtd) "
+ "VALUES (%s, %s, %s, %s, %s, %s)",
[(date, fund, *t) for t in df.itertuples(index=False)],
)
conn.commit()
if __name__ == "__main__":
+ import datetime
+ import argparse
+ import warnings
+ from serenitas.analytics.dates import prev_business_day
+ from serenitas.utils.db2 import dbconn
+
parser = argparse.ArgumentParser(
description="Shock data/ calculate JTD and insert into DB"
)
@@ -144,12 +142,9 @@ if __name__ == "__main__":
parser.add_argument("-n", "--no-upload", action="store_true", help="do not upload")
args = parser.parse_args()
conn = dbconn("dawndb")
- survival_curves = get_survival_curves(args.date)
- logging.basicConfig(
- format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
- level=logging.INFO,
- )
- logger = logging.getLogger(__name__)
+
+ survival_curves = get_survival_curves(Trade._conn, args.date)
+
for fund in (
"SERCGMAST",
"BOWDST",
@@ -164,7 +159,7 @@ if __name__ == "__main__":
portf, _ = build_portfolio(args.date, args.date, fund)
shocks = gen_shocks(portf, args.date, fund)
save_shocks(conn, args.date, shocks, fund)
- logger.info(f"{args.date}: {fund} Shocks Done")
+ print(f"{args.date}: {fund} Shocks Done")
jtd = gen_jtd(portf, survival_curves)
save_jtd(conn, args.date, jtd, fund)
- logger.info(f"{args.date}: {fund} JTD Done")
+ print(f"{args.date}: {fund} JTD Done")