Diffstat (limited to 'python')
-rw-r--r--  python/build_default_table.py   3
-rw-r--r--  python/collateral_calc.py       7
-rw-r--r--  python/markit_red.py            6
-rw-r--r--  python/thetas-durations.py      3
-rw-r--r--  python/yieldcurve.py           11
5 files changed, 13 insertions(+), 17 deletions(-)
diff --git a/python/build_default_table.py b/python/build_default_table.py
index 7a4318b2..56f1cf51 100644
--- a/python/build_default_table.py
+++ b/python/build_default_table.py
@@ -1,6 +1,6 @@
import pandas as pd
from bbg_helpers import init_bbg_session, BBG_IP, retrieve_data
-from db import dbengine
+from db import serenitas_engine
indices = ["CDX HY CDSI S19 5Y", "ITRX XOVER CDSI S25 5Y",
"CDX HY CDSI S15 5Y", "ITRX XOVER CDSI S20 5Y",
"CDX HY CDSI S25 5Y",
@@ -15,5 +15,4 @@ df = df[~df.duplicated(['Company ID'])]
breakpoint()
df = df[['Company ID', 'Auction Date', 'Event Date', 'CDS Recovery Rate']]
df.columns = ['id', 'auction_date', 'event_date', 'recovery']
-serenitas_engine = dbengine('serenitasdb')
df.to_sql('defaulted', serenitas_engine, if_exists='append', index=False)
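The pattern in this commit is the same across all five scripts: per-call `dbengine('serenitasdb')` / `dbengine("dawndb")` lookups are replaced by engines imported once from the shared `db` module. That module is not part of this diff, so the sketch below is only an assumption of what it now exposes; the DSN and the `dbengine` body are illustrative placeholders.

    # Hypothetical sketch of python/db.py after this change (not shown in the diff).
    from sqlalchemy import create_engine

    def dbengine(name):
        # Pre-existing helper: build an engine for the named database.
        # Real credential/DSN handling is not visible in this commit.
        return create_engine(f"postgresql:///{name}")

    # Module-level engines that the edited scripts now import directly,
    # instead of calling dbengine(...) at each use site.
    serenitas_engine = dbengine("serenitasdb")
    dawn_engine = dbengine("dawndb")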
diff --git a/python/collateral_calc.py b/python/collateral_calc.py
index 7b3ab6d3..de9f0960 100644
--- a/python/collateral_calc.py
+++ b/python/collateral_calc.py
@@ -2,7 +2,7 @@ import os
import logging
import pandas as pd
-from db import dbengine
+from db import dawn_engine
from exchange import ExchangeMessage
from exchangelib import HTMLBody
from pathlib import Path
@@ -147,7 +147,6 @@ def download_citi_emails(count=20):
p.write_bytes(attach.content)
def baml_collateral(d):
- dawn_engine = dbengine("dawndb")
df = pd.read_csv(DAILY_DIR / "BAML_reports" /
f"OTC_Open_Positions_-_Credit_-_LMCG_{d:%Y%m%d}.CSV",
usecols=['MTM', 'ACCRUEDCPN', 'VARMARGIN', 'REDCODE',
@@ -229,7 +228,7 @@ def sg_collateral(d):
df = df.groupby(['index_type', 'series', 'tenor'])['Collateral'].sum()
positions = pd.read_sql_query("SELECT security_desc, folder, notional, currency "
"FROM list_cds_positions_by_strat(%s)",
- dbengine("dawndb"), params=(d.date(),))
+ dawn_engine, params=(d.date(),))
instruments = positions.security_desc.str.split(expand=True)[[1, 3, 4]]
instruments.columns = ['index_type', 'series', 'tenor']
instruments.series = instruments.series.str.extract("S(\d+)")
@@ -259,7 +258,7 @@ def sg_collateral(d):
'Currency': 'currency'})
df_margin['date'] = d
try:
- df_margin.to_sql("fcm_im", dbengine("dawndb"), if_exists='append', index=False)
+ df_margin.to_sql("fcm_im", dawn_engine, if_exists='append', index=False)
except IntegrityError:
pass
return df
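In `collateral_calc.py` the shared `dawn_engine` is handed straight to pandas, and the existing `try`/`except IntegrityError` keeps the `fcm_im` append idempotent when a day is rerun. A minimal sketch of that pattern, assuming `IntegrityError` is `sqlalchemy.exc.IntegrityError` (its import lies outside the hunks shown) and that `fcm_im` carries a unique key on the date column:

    import pandas as pd
    from sqlalchemy.exc import IntegrityError
    from db import dawn_engine  # module-level engine introduced by this commit

    def store_margin(df_margin: pd.DataFrame) -> None:
        try:
            df_margin.to_sql("fcm_im", dawn_engine, if_exists="append", index=False)
        except IntegrityError:
            # Rows for this date already exist; rerunning the report is a no-op.
            pass

Note that the whole frame is skipped if any row collides, which matches the rerun-the-whole-day semantics of the script.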
diff --git a/python/markit_red.py b/python/markit_red.py
index 290a45cd..831ba90b 100644
--- a/python/markit_red.py
+++ b/python/markit_red.py
@@ -6,7 +6,7 @@ import shutil
import zipfile
from pathlib import Path
from lxml import etree
-from db import with_connection, dbengine
+from db import serenitas_engine
import pandas as pd
def request_payload(payload):
@@ -37,8 +37,7 @@ def download_report(report):
else:
yield request_payload(payload)
-@with_connection('serenitasdb')
-def update_redcodes(conn, fname):
+def update_redcodes(fname):
with open(os.path.join(os.environ['BASE_DIR'], "Tranche_data", "RED_reports", fname)) as fh:
et = etree.parse(fh)
data_version = []
@@ -56,7 +55,6 @@ def update_redcodes(conn, fname):
df_version['activeversion'] = df_version['activeversion'].map({'Y': True, None: False})
df_maturity.tenor = df_maturity['tenor'].map(lambda s: s.lower() + 'r')
df_maturity.coupon = (pd.to_numeric(df_maturity.coupon) * 10000).astype(int)
- serenitas_engine = dbengine('serenitasdb')
df_version.to_sql("index_version_markit", serenitas_engine, index=False, if_exists='append')
df_maturity.to_sql("index_maturity_markit", serenitas_engine, index=False, if_exists='append')
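Dropping the `@with_connection('serenitasdb')` decorator also changes the call site: `update_redcodes` no longer receives an injected `conn` and takes only the report file name, since `to_sql` accepts the SQLAlchemy engine directly. A hedged usage sketch (the file name is illustrative):

    from markit_red import update_redcodes

    update_redcodes("RED_full_report.xml")  # hypothetical report name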
diff --git a/python/thetas-durations.py b/python/thetas-durations.py
index 07e6cb03..b3d2e7cf 100644
--- a/python/thetas-durations.py
+++ b/python/thetas-durations.py
@@ -5,12 +5,11 @@ from analytics.utils import tenor_t
from pandas.tseries.offsets import BDay
from dateutil.relativedelta import relativedelta
from yieldcurve import get_curve
-from db import dbengine
+from db import serenitas_engine
from pyisda.legs import FeeLeg, ContingentLeg
from pyisda.curve import SpreadCurve
from pyisda.date import previous_twentieth
-serenitas_engine = dbengine('serenitasdb')
tenors = {"IG": ("3yr", "5yr", "7yr", "10yr"),
"HY": ("3yr", "5yr", "7yr"),
"EU": ("3yr", "5yr", "7yr", "10yr"),
diff --git a/python/yieldcurve.py b/python/yieldcurve.py
index 007115e0..230a7acd 100644
--- a/python/yieldcurve.py
+++ b/python/yieldcurve.py
@@ -18,7 +18,7 @@ from quantlib.time.date import pydate_from_qldate
import numpy as np
from quantlib.quotes import SimpleQuote
-from db import dbconn, dbengine
+from db import serenitas_engine
from pyisda.curve import YieldCurve
from pyisda.date import BadDay
import warnings
@@ -30,7 +30,7 @@ def load_curves(currency="USD", date=None):
if date:
sql_str += " WHERE effective_date=%s"
- with closing(dbconn('serenitasdb')) as conn:
+ with closing(serenitas_engine.raw_connection()) as conn:
with conn.cursor() as c:
if date:
c.execute(sql_str, (date,))
@@ -226,8 +226,8 @@ def build_curves(currency="USD"):
isda_ibor = IborIndex("IsdaIbor", Period(6, Months), 2, EURCurrency(),
calendar, ModifiedFollowing, False, Actual360())
fix_freq = Annual
- engine = dbengine('serenitasdb')
- rates = pd.read_sql_table('{}_rates'.format(currency.lower()), engine,
+ rates = pd.read_sql_table('{}_rates'.format(currency.lower()),
+ serenitas_engine,
index_col='effective_date')
quotes = [SimpleQuote() for c in rates.columns]
gen = zip(quotes, rates.columns)
@@ -237,7 +237,7 @@ def build_curves(currency="USD"):
fix_freq, ModifiedFollowing,
Thirty360(), isda_ibor) for q, t in gen]
sql_str = f"INSERT INTO {currency}_curves VALUES(%s, %s) ON CONFLICT DO NOTHING"
- conn = dbconn('serenitasdb')
+ conn = serenitas_engine.raw_connection()
for effective_date, curve_data in rates.iterrows():
print(effective_date)
settings.evaluation_date = Date.from_datetime(effective_date)
@@ -253,6 +253,7 @@ def build_curves(currency="USD"):
with conn.cursor() as c:
c.execute(sql_str, (effective_date, lz4.block.compress(jp_yc.__getstate__())))
conn.commit()
+ conn.close()
if __name__ == "__main__":
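With `dbconn` removed, the places in `yieldcurve.py` that still need a DBAPI cursor (for `execute`/`commit` of the lz4-compressed curve blobs) go through `serenitas_engine.raw_connection()`, the standard SQLAlchemy way to borrow a raw DBAPI connection from the engine's pool. A minimal sketch of that pattern under the same assumption; the table name and payload are illustrative:

    from contextlib import closing
    from db import serenitas_engine

    def store_curve(effective_date, payload: bytes) -> None:
        sql = "INSERT INTO usd_curves VALUES(%s, %s) ON CONFLICT DO NOTHING"
        # raw_connection() returns a pooled DBAPI connection; unlike
        # engine.connect(), it supports cursor() and commit() directly.
        with closing(serenitas_engine.raw_connection()) as conn:
            with conn.cursor() as c:
                c.execute(sql, (effective_date, payload))
            conn.commit()

In `build_curves` the connection is opened once outside the loop and, with this commit, explicitly closed at the end; the sketch above uses `closing()` for a single call instead.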