aboutsummaryrefslogtreecommitdiffstats
path: root/python
diff options
context:
space:
mode:
Diffstat (limited to 'python')
-rw-r--r--python/reto.py147
1 files changed, 147 insertions, 0 deletions
diff --git a/python/reto.py b/python/reto.py
new file mode 100644
index 00000000..47312717
--- /dev/null
+++ b/python/reto.py
@@ -0,0 +1,147 @@
+import datetime
+import globeop_reports as go
+import pandas as pd
+import numpy as np
+import argparse
+
+from pandas.tseries.offsets import BDay, MonthEnd, BMonthEnd, CustomBusinessMonthEnd
+
+from risk.bonds import subprime_risk, crt_risk, clo_risk
+from risk.portfolio import build_portfolio, generate_vol_surface
+from pnl_explain import get_bond_pv
+
+import serenitas.analytics as ana
+from serenitas.analytics.index_data import load_all_curves
+from serenitas.analytics.scenarios import run_portfolio_scenarios
+from serenitas.analytics.basket_index import BasketIndex
+from serenitas.analytics.base import Trade
+from serenitas.utils.db2 import dbconn
+from serenitas.utils.pool import serenitas_pool, dawn_pool
+from serenitas.utils.db import serenitas_engine, dawn_engine
+from serenitas.analytics.dates import prev_business_day
+
+
def parse_args():
    """Parse command line arguments.

    Returns
    -------
    argparse.Namespace
        ``date`` -- an ISO-format date, defaulting to the previous
        business day -- and the ``no_upload`` flag.
    """
    ap = argparse.ArgumentParser(description="Shock data and insert into DB")
    ap.add_argument(
        "date",
        nargs="?",
        type=datetime.date.fromisoformat,
        default=prev_business_day(datetime.date.today()),
    )
    ap.add_argument(
        "-n",
        "--no-upload",
        action="store_true",
        help="do not upload",
    )
    return ap.parse_args()
+
+
def gen_spreads(shock_date, fund):
    """Run spread-shock scenarios for *fund* as of *shock_date* and
    aggregate the per-trade results into strategy buckets.

    Parameters
    ----------
    shock_date : datetime.date
        Valuation date; on-the-run index levels and the portfolio are
        loaded as of this date.
    fund : str
        Fund identifier passed to ``build_portfolio`` (e.g. "BRINKER").

    Returns
    -------
    pd.DataFrame
        One row per (scen_type, date, spread_shock) with a column per
        aggregated strategy plus "options", "tranches" and "total".
    """
    Trade.init_ontr(shock_date)
    # NOTE(review): flips a private switch on serenitas.analytics --
    # presumably disables local evaluation; confirm intent with that module.
    ana._local = False
    # Absolute shock sizes (in bp -- TODO confirm units), rescaled below to
    # moves relative to the HY on-the-run spread.
    spread_shock = np.array([-100.0, -25.0, 1.0, +25.0, 100.0, 200.0, 500, 1000])
    spread_shock /= Trade._ontr["HY"].spread
    portf, _ = build_portfolio(shock_date, shock_date, fund)
    vol_surface = generate_vol_surface(portf, 10, "BAML")
    portf.reset_pv()
    # Single-date scenario grid: spread dimension varies, vol/corr pinned at 0.
    scens = run_portfolio_scenarios(
        portf,
        date_range=[pd.Timestamp(shock_date)],
        params=["pnl", "hy_equiv"],
        spread_shock=spread_shock,
        vol_shock=[0.0],
        corr_shock=[0.0],
        vol_surface=vol_surface,
    )

    # Strategy codes folded into two aggregate buckets below; any strategy
    # not listed here survives as its own column.
    strategies = {}
    strategies["options"] = [
        "HYOPTDEL",
        "HYPAYER",
        "HYREC",
        "IGOPTDEL",
        "IGPAYER",
        "IGREC",
    ]
    strategies["tranches"] = [
        "HYSNR",
        "HYMEZ",
        "HYINX",
        "HYEQY",
        "IGSNR",
        "IGMEZ",
        "IGINX",
        "IGEQY",
        "EUSNR",
        "EUMEZ",
        "EUINX",
        "EUEQY",
        "XOSNR",
        "XOMEZ",
        "XOINX",
        "XOEQY",
        "BSPK",
    ]
    if fund == "BRINKER":
        # Only the corr level is dropped here, so BRINKER keeps a vol_shock
        # index level the other funds do not -- TODO confirm this asymmetry
        # is intentional.
        scens = scens.xs(0, level="corr_shock")
    else:
        scens = scens.xs((0.0, 0.0), level=["vol_shock", "corr_shock"])

    scens.columns.names = ["strategy", "trade_id", "scen_type"]

    results = {}
    # NOTE(review): DataFrame.groupby(..., axis=1) is deprecated in modern
    # pandas; consider grouping on the transposed frame when upgrading.
    for i, g in scens.groupby(level="scen_type", axis=1):
        # Collapse trade-level columns to one column per strategy.
        temp = g.groupby(level="strategy", axis=1).sum()
        for key, item in strategies.items():
            # Sum whichever bucket members are actually present into a
            # single bucket column, then drop the members.
            exist_columns = set(temp.columns).intersection(item)
            temp[key] = temp[exist_columns].sum(axis=1)
            temp.drop(exist_columns, axis=1, inplace=True)
        temp["total"] = temp.sum(axis=1)
        results[i] = temp
    # Concat by dict key adds scen_type as an unnamed outer index level
    # (materialized as "level_0" by transform_df's reset_index).
    results = pd.concat(results)
    return results
+
+
def transform_df(df):
    """Reshape the wide scenario-results frame into long format for upload.

    Parameters
    ----------
    df : pd.DataFrame
        Wide frame as produced by ``gen_spreads``: a 3-level row index
        (unnamed scenario type, "date", "spread_shock") and one column per
        aggregated strategy.

    Returns
    -------
    pd.DataFrame
        Long frame with columns ``output_type``, ``date``, ``spread_shock``,
        ``strategy``, ``value`` -- one row per (shock, strategy) cell.
    """
    # The dict-keyed concat in gen_spreads leaves the outer index level
    # unnamed, so reset_index() materializes it as "level_0"; it holds the
    # scenario type ("pnl" / "hy_equiv").
    df = df.reset_index().rename(columns={"level_0": "unit"})

    id_vars = ["unit", "date", "spread_shock"]
    # Every non-id column is a strategy column.  Deriving them by name
    # (preserving column order) is equivalent to the positional slice
    # df.columns[3:] but does not depend on the id columns coming first.
    strategy_columns = [c for c in df.columns if c not in id_vars]

    # One row per (unit, date, spread_shock, strategy) cell.
    df = pd.melt(df, id_vars=id_vars, value_vars=strategy_columns)

    return df.rename(columns={"variable": "strategy", "unit": "output_type"})
+
+
if __name__ == "__main__":
    args = parse_args()
    conn = dbconn("dawndb")
    for fund in ("SERCGMAST", "BOWDST", "ISOSEL", "BRINKER"):
        results = gen_spreads(args.date, fund)
        df = transform_df(results)
        df["fund"] = fund
        # Honor -n/--no-upload: the original parsed the flag but never
        # checked it, so shocks were deleted and re-uploaded regardless.
        if args.no_upload:
            continue
        # Replace any previous upload for this fund/date before appending
        # the fresh run.
        with conn.cursor() as c:
            c.execute(
                "DELETE FROM shocks WHERE fund=%s and date=%s",
                (
                    fund,
                    args.date,
                ),
            )
        conn.commit()
        df.to_sql("shocks", dawn_engine, if_exists="append", index=False)