aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--python/analytics/basket_index.py6
-rw-r--r--python/analytics/curve_trades.py21
-rw-r--r--python/analytics/db.py3
-rw-r--r--python/analytics/index.py9
-rw-r--r--python/analytics/index_data.py35
-rw-r--r--python/analytics/option.py9
-rw-r--r--python/analytics/tranche_basket.py43
-rw-r--r--python/db.py3
-rw-r--r--python/globeop_reports.py25
-rw-r--r--python/notebooks/Reto Report.ipynb23
10 files changed, 85 insertions, 92 deletions
diff --git a/python/analytics/basket_index.py b/python/analytics/basket_index.py
index 2505513b..4a5ce710 100644
--- a/python/analytics/basket_index.py
+++ b/python/analytics/basket_index.py
@@ -1,5 +1,5 @@
from .index_data import get_index_quotes, get_singlenames_curves
-from .db import _engine
+from .db import serenitas_engine
from .utils import tenor_t
from functools import partial
from pyisda.credit_index import CreditIndex
@@ -42,14 +42,14 @@ class BasketIndex(CreditIndex):
"issue_date "\
"FROM index_maturity " \
"WHERE index=%s AND series=%s",
- _engine,
+ serenitas_engine,
index_col='tenor',
params=(index_type, series),
parse_dates=['maturity', 'issue_date'])
if self.index_desc.empty:
raise ValueError(f"Index {index_type} {series} doesn't exist")
self._index_version = tuple(tuple(r.values()) for r in
- _engine.execute(
+ serenitas_engine.execute(
"SELECT lastdate,"
" indexfactor/100 AS factor,"
" cumulativeloss/100 AS cum_loss,"
diff --git a/python/analytics/curve_trades.py b/python/analytics/curve_trades.py
index 555194ba..73be4ab5 100644
--- a/python/analytics/curve_trades.py
+++ b/python/analytics/curve_trades.py
@@ -1,5 +1,5 @@
from analytics.index_data import get_index_quotes, index_returns
-from db import dbengine
+from db import serenitas_engine, dawn_engine
from analytics import CreditIndex, Portfolio
from analytics.utils import roll_date
from dateutil.relativedelta import relativedelta
@@ -16,12 +16,9 @@ import statsmodels.formula.api as smf
import numpy as np
import matplotlib.pyplot as plt
-serenitasdb = dbengine('serenitasdb')
-dawndb = dbengine('dawndb')
-
def on_the_run(index):
- r = serenitasdb.execute("SELECT max(series) FROM index_version WHERE index=%s",
- (index,))
+ r = serenitas_engine.execute("SELECT max(series) FROM index_version WHERE index=%s",
+ (index,))
series, = r.fetchone()
return series
@@ -151,9 +148,9 @@ def forward_loss(index='IG'):
"close_spread*duration / 100 AS indexel " \
"FROM index_quotes WHERE index=%s AND date >= %s " \
"ORDER BY date DESC, series ASC, duration ASC",
- serenitasdb, parse_dates=['date'], params=[index, start_date])
+                            serenitas_engine, parse_dates=['date'], params=[index, start_date])
df1 = pd.read_sql_query("SELECT index, series, tenor, maturity FROM index_maturity",
- serenitasdb, parse_dates=['maturity'])
+ serenitas_engine, parse_dates=['maturity'])
df = df.merge(df1, on=['index','series','tenor'])
df = df.set_index(['date','index', 'maturity']).dropna()
@@ -277,7 +274,7 @@ def curve_pos(value_date, index_type='IG'):
"JOIN index_desc " \
"ON security_id=redindexcode AND " \
"index_desc.maturity=list_cds_positions.maturity"
- df = pd.read_sql_query(sql_string, dawndb,
+ df = pd.read_sql_query(sql_string, dawn_engine,
params=[value_date, f'SER_{index_type}CURVE'])
portf = Portfolio([CreditIndex(row.index, row.series, row.tenor,
@@ -301,11 +298,11 @@ def curve_shape(value_date, index='IG', percentile=.95, spread=None):
if spread is None:
sql_string = "SELECT closespread FROM index_quotes where index = %s " \
"and series = %s and tenor = %s and date = %s"
- spread_df = pd.read_sql_query(sql_string, serenitasdb,
+ spread_df = pd.read_sql_query(sql_string, serenitas_engine,
params=[index, series, '5yr', value_date])
spread = spread_df.iloc[0][0]
sql_string = "SELECT tenor, maturity FROM index_maturity where index = %s and series = %s"
- lookup_table = pd.read_sql_query(sql_string, serenitasdb, parse_dates=['maturity'],
+ lookup_table = pd.read_sql_query(sql_string, serenitas_engine, parse_dates=['maturity'],
params=[index, series])
df = curve_shape[steepness == steepness.quantile(percentile, 'nearest')]
@@ -350,7 +347,7 @@ def pos_pnl_abs(portf, value_date, index='IG', rolling=6, years=3):
df = df.groupby(level=['date', 'tenor']).nth(-1)['close_spread'].unstack(-1)
sql_string = "SELECT tenor, maturity FROM index_maturity where index = %s and series = %s"
- lookup_table = pd.read_sql_query(sql_string, serenitasdb, parse_dates=['maturity'],
+ lookup_table = pd.read_sql_query(sql_string, serenitas_engine, parse_dates=['maturity'],
params=[index, series])
lookup_table['year_frac'] = (lookup_table.maturity - pd.to_datetime(value_date)).dt.days/365
diff --git a/python/analytics/db.py b/python/analytics/db.py
index c8b9f385..4abfd813 100644
--- a/python/analytics/db.py
+++ b/python/analytics/db.py
@@ -1,2 +1 @@
-from db import dbengine, dbconn, DataError
-_engine = dbengine('serenitasdb')
+from db import serenitas_engine, dawn_engine, DataError
diff --git a/python/analytics/index.py b/python/analytics/index.py
index 56758693..da0063bc 100644
--- a/python/analytics/index.py
+++ b/python/analytics/index.py
@@ -4,7 +4,7 @@ import datetime
import pandas as pd
from .credit_default_swap import CreditDefaultSwap
-from .db import _engine, dbengine, DataError
+from .db import serenitas_engine, dawn_engine, DataError
from bbg_helpers import BBG_IP, retrieve_data, init_bbg_session
from pandas.tseries.offsets import BDay
from pyisda.curve import SpreadCurve
@@ -58,7 +58,7 @@ class CreditIndex(CreditDefaultSwap):
raise ValueError("Not enough information to load the index.")
try:
df = pd.read_sql_query(sql_str,
- _engine,
+ serenitas_engine,
parse_dates=['lastdate', 'issue_date'],
params=params)
maturity = df.maturity[0]
@@ -96,8 +96,7 @@ class CreditIndex(CreditDefaultSwap):
@classmethod
def from_tradeid(cls, trade_id):
- engine = dbengine('dawndb')
- r = engine.execute("""
+ r = dawn_engine.execute("""
SELECT * FROM cds
LEFT JOIN index_desc
ON security_id = redindexcode AND cds.maturity = index_desc.maturity
@@ -143,7 +142,7 @@ class CreditIndex(CreditDefaultSwap):
ref_data = retrieve_data(session, [security], field)
self.ref = ref_data[security][field]
else:
- run = _engine.execute("""SELECT * FROM index_quotes
+ run = serenitas_engine.execute("""SELECT * FROM index_quotes
WHERE index=%s AND series=%s AND tenor=%s AND date=%s""",
(self.index_type, self.series, self.tenor, self.value_date))
rec = run.fetchone()
diff --git a/python/analytics/index_data.py b/python/analytics/index_data.py
index 3a588e07..0fd06ff0 100644
--- a/python/analytics/index_data.py
+++ b/python/analytics/index_data.py
@@ -1,4 +1,4 @@
-from .db import _engine, dbconn
+from .db import serenitas_engine
from dates import bond_cal
import numpy as np
@@ -32,18 +32,20 @@ def insert_quotes():
-                           _engine, parse_dates=['date'], index_col=['date'])
+                           serenitas_engine, parse_dates=['date'], index_col=['date'])
df = df.loc[dates]
for tup in df.itertuples():
- result = _engine.execute("SELECT indexfactor, cumulativeloss FROM index_version "
- "WHERE index = 'HY' AND series=%s AND version in (%s, %s)"
- "ORDER BY version",
- (tup.series, tup.version, tup.version+1))
+ result = serenitas_engine.execute(
+ "SELECT indexfactor, cumulativeloss FROM index_version "
+ "WHERE index = 'HY' AND series=%s AND version in (%s, %s)"
+ "ORDER BY version",
+ (tup.series, tup.version, tup.version+1))
factor1, cumloss1 = result.fetchone()
factor2, cumloss2 = result.fetchone()
recovery = 1-(cumloss2-cumloss1)
- version2_price = (factor1 * tup.closeprice - 100*recovery)/factor2
+ version2_price = (factor1 * tup.closeprice - 100 * recovery)/factor2
print(version2_price)
- _engine.execute("INSERT INTO index_quotes(date, index, series, version, tenor, closeprice)"
- "VALUES(%s, %s, %s, %s, %s, %s)",
- (tup.Index, 'HY', tup.series, tup.version+1, tup.tenor, version2_price))
+ serenitas_engine.execute(
+ "INSERT INTO index_quotes(date, index, series, version, tenor, closeprice)"
+ "VALUES(%s, %s, %s, %s, %s, %s)",
+ (tup.Index, 'HY', tup.series, tup.version+1, tup.tenor, version2_price))
def get_index_quotes(index=None, series=None, tenor=None, from_date=None,
@@ -77,7 +79,7 @@ def get_index_quotes(index=None, series=None, tenor=None, from_date=None,
return {k: tuple(v) if isinstance(v, list) else v
for k, v in args.items() if v is not None}
- df = pd.read_sql_query(sql_str, _engine, parse_dates=['date'],
+ df = pd.read_sql_query(sql_str, serenitas_engine, parse_dates=['date'],
index_col=['date', 'index', 'series', 'version'],
params=make_params(args))
df.tenor = df.tenor.astype(tenor_t)
@@ -128,7 +130,7 @@ def index_returns(df=None, index=None, series=None, tenor=None, from_date=None,
coupon_data = pd.read_sql_query("SELECT index, series, tenor, coupon * 1e-4 AS coupon, "
"maturity FROM "
"index_maturity WHERE coupon is NOT NULL",
- _engine,
+ serenitas_engine,
index_col=['index', 'series', 'tenor'])
df = df.reset_index('date').join(coupon_data).reset_index('tenor')
# for some reason pandas doesn't keep the categories, so we have to
@@ -145,11 +147,9 @@ def index_returns(df=None, index=None, series=None, tenor=None, from_date=None,
def get_singlenames_quotes(indexname, date, tenors):
- conn = dbconn('serenitasdb')
- with conn.cursor() as c:
- c.execute("SELECT * FROM curve_quotes2(%s, %s, %s)",
- vars=(indexname, date, list(tenors)))
- return list(c)
+ r = serenitas_engine.execute("SELECT * FROM curve_quotes2(%s, %s, %s)",
+ (indexname, date, list(tenors)))
+ return list(r)
def build_curve(r, tenors, currency="USD"):
@@ -203,7 +203,9 @@
 def get_tranche_quotes(index_type, series, tenor, date=datetime.date.today()):
-    conn = dbconn('serenitasdb')
+    conn = serenitas_engine.raw_connection()
     with conn.cursor() as c:
         c.callproc("get_tranche_quotes", (index_type, series, tenor, date))
-        return pd.DataFrame.from_records(dict(d) for d in c)
+        df = pd.DataFrame.from_records(dict(d) for d in c)
+    conn.close()
+    return df
diff --git a/python/analytics/option.py b/python/analytics/option.py
index f75e0a93..c423f15c 100644
--- a/python/analytics/option.py
+++ b/python/analytics/option.py
@@ -5,13 +5,11 @@ import numpy as np
import pandas as pd
import analytics
-from db import dbengine
-
from .black import black, Nx
from .sabr import sabr
from .utils import GHquad, build_table
from .index import g, ForwardIndex, CreditIndex
-from .db import _engine
+from .db import serenitas_engine, dawn_engine
from .utils import memoize
from pandas.tseries.offsets import BDay
@@ -79,8 +77,7 @@ class BlackSwaption(ForwardIndex):
@classmethod
def from_tradeid(cls, trade_id, index=None):
- engine = dbengine('dawndb')
- r = engine.execute("SELECT * from swaptions WHERE id=%s", (trade_id,))
+ r = dawn_engine.execute("SELECT * from swaptions WHERE id=%s", (trade_id,))
rec = r.fetchone()
if rec is None:
return ValueError("trade_id doesn't exist")
@@ -493,7 +490,7 @@ class QuoteSurface():
"WHERE quotedate::date = %s AND index= %s AND series = %s "
"AND quote_source != 'SG' "
"ORDER BY quotedate, strike",
- _engine,
+ serenitas_engine,
parse_dates=['quotedate', 'expiry'],
params=(value_date, index_type.upper(), series))
self._quote_is_price = index_type == "HY"
diff --git a/python/analytics/tranche_basket.py b/python/analytics/tranche_basket.py
index 0a4ae8ae..64fc03dd 100644
--- a/python/analytics/tranche_basket.py
+++ b/python/analytics/tranche_basket.py
@@ -6,7 +6,7 @@ from .tranche_functions import (
from .index_data import get_tranche_quotes
from .utils import memoize, build_table
from collections import namedtuple
-from db import dbconn
+from .db import dawn_engine, serenitas_engine
from copy import deepcopy
from lru import LRU
from pyisda.date import cds_accrued
@@ -80,13 +80,11 @@ class DualCorrTranche():
@classmethod
def from_tradeid(cls, trade_id):
- with dbconn('dawndb') as conn:
- with conn.cursor() as c:
- c.execute("SELECT * FROM cds LEFT JOIN index_desc "
- "ON security_id = redindexcode AND "
- "cds.maturity = index_desc.maturity "
- "WHERE id=%s", (trade_id,))
- rec = c.fetchone()
+ r = dawn_engine.execute("SELECT * FROM cds LEFT JOIN index_desc "
+ "ON security_id = redindexcode AND "
+ "cds.maturity = index_desc.maturity "
+ "WHERE id=%s", (trade_id,))
+ rec = r.fetchone()
instance = cls(rec['index'], rec['series'], rec['tenor'],
attach=rec['attach'],
detach=rec['detach'],
@@ -265,20 +263,21 @@ class DualCorrTranche():
"WHERE a.index=%s AND a.series=%s AND a.tenor=%s "
"AND quotedate::date=%s "
"AND (a.detach = %s OR a.attach = %s) ORDER BY a.attach")
- with dbconn('serenitasdb') as conn:
- with conn.cursor() as c:
- c.execute(sql_string, (self.index_type, self.series,
- self.tenor, self.value_date,
- self.attach, self.attach))
- if self.attach == 0:
- self._tranche_id, self.rho[1] = next(c)
- elif self.detach == 100:
- self._tranche_id, self.rho[0] = next(c)
- else:
- self.rho = []
- for tranche_id, corr in c:
- self.rho.append(corr)
- self._tranche_id = tranche_id
+ conn = serenitas_engine.raw_connection()
+ with conn.cursor() as c:
+ c.execute(sql_string, (self.index_type, self.series,
+ self.tenor, self.value_date,
+ self.attach, self.attach))
+ if self.attach == 0:
+ self._tranche_id, self.rho[1] = next(c)
+ elif self.detach == 100:
+ self._tranche_id, self.rho[0] = next(c)
+ else:
+ self.rho = []
+ for tranche_id, corr in c:
+ self.rho.append(corr)
+ self._tranche_id = tranche_id
+ conn.close()
@property
def tranche_factor(self):
diff --git a/python/db.py b/python/db.py
index 43405eed..3535ad1f 100644
--- a/python/db.py
+++ b/python/db.py
@@ -90,3 +90,6 @@ serenitas_pool = SimpleConnectionPool(2, 5, database='serenitasdb',
user='serenitas_user',
host=os.environ.get("PGHOST", "debian"),
cursor_factory=None)
+
+serenitas_engine = dbengine('serenitasdb')
+dawn_engine = dbengine('dawndb')
diff --git a/python/globeop_reports.py b/python/globeop_reports.py
index 965649e1..4d89f081 100644
--- a/python/globeop_reports.py
+++ b/python/globeop_reports.py
@@ -1,4 +1,4 @@
-from db import dbengine
+from db import dbengine, dawn_engine
from pandas.tseries.offsets import DateOffset, MonthEnd
from yieldcurve import YC
from quantlib.time.api import Date, Months, Period
@@ -9,12 +9,10 @@ import pandas as pd
import numpy as np
import datetime
-etengine = dbengine('etdb')
-dawnengine = dbengine('dawndb')
def get_monthly_pnl(group_by=['identifier']):
sql_string = "SELECT * FROM pnl_reports"
- df_pnl = pd.read_sql_query(sql_string, dawnengine,
+ df_pnl = pd.read_sql_query(sql_string, dawn_engine,
parse_dates=['date'],
index_col=['date'])
df_pnl['identifier'] = df_pnl.invid.str.replace("_A$", "")
@@ -26,11 +24,11 @@ def get_monthly_pnl(group_by=['identifier']):
def get_portfolio(report_date=None):
if report_date is not None:
sql_string = "SELECT * FROM valuation_reports where periodenddate = %s"
- df = pd.read_sql_query(sql_string, dawnengine, parse_dates=['periodenddate'],
+ df = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['periodenddate'],
index_col=['periodenddate'], params=[report_date,])
else:
sql_string = "SELECT * FROM valuation_reports"
- df = pd.read_sql_query(sql_string, dawnengine, parse_dates=['periodenddate'],
+ df = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['periodenddate'],
index_col=['periodenddate'])
df['identifier'] = df.invid.str.replace("_A$", "")
return df
@@ -38,7 +36,7 @@ def get_portfolio(report_date=None):
def trade_performance():
sql_string = "SELECT * FROM bonds"
- df_trades = pd.read_sql_query(sql_string, dawnengine,
+ df_trades = pd.read_sql_query(sql_string, dawn_engine,
parse_dates={'lastupdate': {'utc': True},
'trade_date': {},
'settle_date': {}})
@@ -77,7 +75,7 @@ def trade_performance():
def get_net_navs():
sql_string = "SELECT * FROM valuation_reports"
- df_val = pd.read_sql_query(sql_string, dawnengine, parse_dates=['periodenddate'])
+ df_val = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['periodenddate'])
nav = df_val[df_val.fund == 'SERCGMAST'].groupby('periodenddate')['endbooknav'].sum()
nav = nav.resample('M').last()
df = pd.read_csv('/home/serenitas/edwin/Python/subscription_fee_data.csv',
@@ -153,7 +151,7 @@ def rmbs_pos(date, model_date=None, dm=False):
"model_version = 1 and "
"date(timestamp) < %s and date(timestamp) > %s "
"order by timestamp desc")
- timestamps = pd.read_sql_query(sql_string, dawnengine, parse_dates=['timestamp'],
+ timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['timestamp'],
params=[date, date - DateOffset(15, 'D')])
if model_date is None:
model_date = (timestamps.loc[0][0]).date()
@@ -166,12 +164,12 @@ def rmbs_pos(date, model_date=None, dm=False):
"and model_version <> 2")
params_list = (model_date,)
if date > datetime.date(2017, 9, 30):
- r = dawnengine.execute("SELECT latest_sim FROM latest_sim(%s)",
+ r = dawn_engine.execute("SELECT latest_sim FROM latest_sim(%s)",
model_date)
model_id, = next(r)
sql_string += " AND model_id_sub = %s"
params_list += (model_id,)
- model = pd.read_sql_query(sql_string, dawnengine, parse_dates=['timestamp'],
+ model = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['timestamp'],
params=params_list)
model = model[model['pv'] != 0]
pos = pos.assign(curr_ntl = pos.endbookmv/pos.endlocalmarketprice *100)
@@ -211,13 +209,14 @@ def clo_pos(date):
return df
else:
sql_string = "select distinct cusip, identifier from bonds where asset_class = 'CLO'"
- cusip_map = {r['identifier']: r['cusip'] for r in dawnengine.execute(sql_string)}
+ cusip_map = {identifier: cusip for cusip, identifier in dawn_engine.execute(sql_string)}
df['cusip'] = df['identifier'].replace(cusip_map)
cusips = df.loc[[df.index[-1]], 'cusip']
placeholders = ",".join(["%s"] * (1 + len(cusips)))
sql_string = f"SELECT * FROM historical_cusip_risk({placeholders})"
+ etengine = dbengine('etdb')
model = pd.read_sql_query(sql_string, etengine, parse_dates=['pricingdate'],
- params=(date, *cusips))
+ params=(date, *cusips))
model.index = cusips
calc_df = df.loc[[df.index[-1]]].set_index('cusip').join(model)
calc_df['hy_equiv'] = calc_df.delta * calc_df.endbookmv
diff --git a/python/notebooks/Reto Report.ipynb b/python/notebooks/Reto Report.ipynb
index 4ced9d4d..cf823db0 100644
--- a/python/notebooks/Reto Report.ipynb
+++ b/python/notebooks/Reto Report.ipynb
@@ -19,11 +19,8 @@
"from analytics.index_data import get_index_quotes\n",
"from analytics.scenarios import run_portfolio_scenarios, run_swaption_scenarios\n",
"from analytics import Swaption, BlackSwaption, CreditIndex, BlackSwaptionVolSurface, Portfolio, ProbSurface, DualCorrTranche\n",
- "from db import dbconn, dbengine\n",
+ "from db import serenitas_engine, dawn_engine\n",
"\n",
- "conn = dbconn('dawndb')\n",
- "dawndb = dbengine('dawndb')\n",
- "serenitasdb = dbengine('serenitasdb')\n",
"analytics.init_ontr()"
]
},
@@ -71,8 +68,8 @@
"#Average Portfolio Sales Turnover - as of last monthend from today\n",
"#(total Bond Sales Proceeds + paydown)/average starting 12 months NAV\n",
"nav = go.get_net_navs()\n",
- "sql_string = \"SELECT * FROM bonds where buysell = 'False'\"\n",
- "df = pd.read_sql_query(sql_string, dbengine('dawndb'),\n",
+ "sql_string = \"SELECT * FROM bonds WHERE buysell IS False\"\n",
+ "df = pd.read_sql_query(sql_string, dawn_engine,\n",
" parse_dates={'lastupdate':'utc=True', 'trade_date':'', 'settle_date':''},\n",
" index_col = 'trade_date')\n",
"df = df.groupby(pd.Grouper(freq='M')).sum()\n",
@@ -84,10 +81,10 @@
" (portfolio.endqty != 0)]\n",
"portfolio = portfolio.set_index('identifier', append=True)\n",
"portfolio = portfolio['endqty'].groupby(['identifier', 'periodenddate']).sum()\n",
- "portfolio = portfolio.reset_index('identifier') \n",
- "sql_string = \"SELECT * from cashflow_history\"\n",
- "cf = pd.read_sql_query(sql_string, dbengine('dawndb'), parse_dates=['date'],\n",
- " index_col=['date']).sort_index()\n",
+ "portfolio = portfolio.reset_index('identifier')\n",
+ "cf = pd.read_sql_query(\"SELECT * FROM cashflow_history\", dawn_engine,\n",
+ " parse_dates=['date'],\n",
+ " index_col=['date']).sort_index()\n",
"df_1 = pd.merge_asof(cf, portfolio.sort_index(), left_index=True, right_index=True, by='identifier')\n",
"df_1 = df_1.dropna(subset=['endqty'])\n",
"df_1 = df_1[(df_1.principal_bal != 0) & (df_1.principal != 0)]\n",
@@ -149,6 +146,7 @@
" \"OVER (partition by security_id, attach) AS ntl_agg \"\n",
" \"FROM cds WHERE swap_type='CD_INDEX' AND termination_cp IS null \"\n",
" \"AND trade_date <= %s AND maturity > %s\")\n",
+ "conn = dawn_engine.raw_connection()\n",
"with conn.cursor() as c:\n",
" c.execute(t_sql_string, (position_date,))\n",
" t_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n",
@@ -156,6 +154,7 @@
" swaption_trades = [[dealid, f\"{folder}_{dealid}\", expiration_date] for dealid, folder, expiration_date in c]\n",
" c.execute(index_sql_string, (position_date, position_date))\n",
" index_trades = [[dealid, f\"{folder}_{dealid}\"] for dealid, folder, ntl in c if ntl != 0]\n",
+ "conn.close()\n",
" \n",
"portf = Portfolio([DualCorrTranche.from_tradeid(dealid) for dealid, _ in t_trades],\n",
" [trade_id for _, trade_id in t_trades])\n",
@@ -168,7 +167,7 @@
"#get bond risks:\n",
"rmbs_pos = go.rmbs_pos(position_date)\n",
"clo_pos = go.clo_pos(position_date)\n",
- "r = serenitasdb.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
+ "r = serenitas_engine.execute(\"select duration from on_the_run where index = 'HY' and date = %s\",\n",
" spread_date)\n",
"duration, = next(r)\n",
"rmbs_pos['hy_equiv'] = rmbs_pos['delta_yield']/duration * 100\n",
@@ -229,7 +228,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.7.1"
+ "version": "3.7.2"
}
},
"nbformat": 4,