Diffstat (limited to 'python/risk/bonds.py')
-rw-r--r--  python/risk/bonds.py  41
1 file changed, 30 insertions, 11 deletions
diff --git a/python/risk/bonds.py b/python/risk/bonds.py
index ff0823c2..23626c3b 100644
--- a/python/risk/bonds.py
+++ b/python/risk/bonds.py
@@ -1,5 +1,6 @@
import pandas as pd
import numpy as np
+import analytics
from enum import Enum, auto
from utils.db import dbengine
@@ -14,35 +15,43 @@ class AssetClass(Enum):
    CRT = auto()
-def get_df(date, engine):
+def get_df(date, engine, zero_factor=False):
+    if zero_factor:
+        normalization = "unnormalized"
+        table1 = "priced_orig_ntl"
+        table2 = "priced_percentiles_orig_ntl"
+    else:
+        normalization = "current_notional"
+        table1 = "priced"
+        table2 = "priced_percentiles"
    df_prices = pd.read_sql_query(
        "SELECT cusip, model_version, pv, modDur, delta_yield, "
        "wal, pv_io, pv_po, pv_RnW, delta_ir_io, delta_ir_po, "
        "delta_hpi, delta_RnW, delta_mult, delta_ir, pv_FB "
-        "FROM priced "
+        f"FROM {table1} "
        "JOIN model_versions USING (model_id_sub) "
        "JOIN model_versions_nonagency USING (model_id_sub) "
        "JOIN simulations_nonagency USING (simulation_id) "
        "WHERE timestamp BETWEEN %s AND date_add(%s, INTERVAL 1 DAY) "
        "AND description='normal' "
-        "AND normalization='current_notional'",
+        "AND normalization=%s",
        engine,
        ["cusip", "model_version"],
-        params=(date, date),
+        params=(date, date, normalization),
    )
    df_percentiles = pd.read_sql_query(
        "SELECT cusip, PV, percentile "
-        "FROM priced_percentiles "
+        f"FROM {table2} "
        "JOIN model_versions USING (model_id_sub) "
        "JOIN model_versions_nonagency USING (model_id_sub) "
        "JOIN simulations_nonagency USING (simulation_id) "
        "WHERE timestamp BETWEEN %s AND date_add(%s, INTERVAL 1 DAY) "
        "AND model_version=3 "
        "AND percentile IN (5, 25, 50, 75, 95) "
-        "AND normalization='current_notional'",
+        "AND normalization=%s",
        engine,
        ["cusip", "percentile"],
-        params=(date, date),
+        params=(date, date, normalization),
    )
    df_prices = df_prices.unstack("model_version")
    df_percentiles = df_percentiles.unstack("percentile")
@@ -50,7 +59,9 @@ def get_df(date, engine):
def subprime_risk(date, conn, engine):
-    df = get_df(date, engine)
+    df = get_df(date, engine, False)
+    df_zero = get_df(date, engine, True)
+    df.loc[df_zero.index] = df_zero
    df_pos = get_portfolio(date, conn, AssetClass.Subprime)
    df_pv = df.xs("pv", axis=1, level=0)
    df_pv.columns = ["pv1", "pv2", "pv3"]
@@ -92,18 +103,26 @@ def subprime_risk(date, conn, engine):
    df_calc = df_pos.join(df_risk)
    yc = YC(evaluation_date=date)
    df_calc = df_calc.assign(
-        b_yield=df_calc.modDur.apply(
+        bond_yield=df_calc.modDur.apply(
            lambda x: x if np.isnan(x) else float(yc.zero_rate(x))
        ),
        delta_ir=df_calc.delta_ir_io + df_calc.delta_ir_po,
        curr_ntl=df_calc.notional * df_calc.factor,
+        # assume beta and ontr are initialized from analytics
+        hy_equiv=(
+            df_calc.delta_yield
+            / analytics._ontr.DV01
+            * analytics._beta["SUBPRIME"]
+            * 1e5
+            * df_calc.local_market_value
+            / df_calc.pv3
+        ),
    )
-    df_calc.b_yield += (
+    df_calc.bond_yield += (
        np.log(df_calc.pv1 * df_calc.curr_ntl / df_calc.local_market_value)
        / df_calc.modDur
    ).clip_upper(1.0)
    # delta scaled by ratio of market_value to model value
-    df_calc.delta_yield *= df_calc.local_market_value / df_calc.pv3
    df_calc.delta_ir *= df_calc.local_market_value / df_calc.pv3
    return df_calc
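
For reference, a minimal sketch (not part of the commit) of the zero-factor override that subprime_risk now performs: rows priced off original notional replace the matching rows of the normal run. The CUSIPs and PVs below are made up; only the indexing pattern mirrors the diff.

import pandas as pd

# Stand-in for the normal pricing pull (normalization='current_notional').
df = pd.DataFrame(
    {"pv": [101.2, 99.8]},
    index=pd.Index(["CUSIP_A", "CUSIP_B"], name="cusip"),
)

# Stand-in for the zero_factor pull against the original-notional tables;
# it only contains the bonds that were re-priced that way.
df_zero = pd.DataFrame(
    {"pv": [42.5]},
    index=pd.Index(["CUSIP_B"], name="cusip"),
)

# Same pattern as the diff: rows present in df_zero overwrite the
# corresponding rows of df, aligned on the cusip index and shared columns.
df.loc[df_zero.index] = df_zero
# df now holds CUSIP_A at 101.2 and CUSIP_B at the zero-factor PV 42.5.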