Diffstat (limited to 'python')
-rw-r--r--  python/intex/load_indicative.py        37
-rw-r--r--  python/intex/load_intex_collateral.py  10
2 files changed, 25 insertions, 22 deletions
diff --git a/python/intex/load_indicative.py b/python/intex/load_indicative.py
index 1c480729..c8134a42 100644
--- a/python/intex/load_indicative.py
+++ b/python/intex/load_indicative.py
@@ -14,12 +14,13 @@ def convertToNone(s):
return None if s in ["", "-", "NR"] else s
-def insert_new_cusip(conn, line):
+def insert_new_tranche(conn, line):
if line["Pari-Passu Tranches"]:
line["Pari-Passu Tranches"] = line["Pari-Passu Tranches"].split(",")
to_insert = (
line["CUSIP"],
line["ISIN"],
+ line["FIGI"],
line["Bloomberg Ticker"],
line["dealname"],
line["tranche"],
@@ -47,22 +48,22 @@ def insert_new_cusip(conn, line):
line["Type"],
)
sqlstr = (
- "INSERT INTO cusip_ref(Cusip, ISIN, bloomberg_ticker, dealname, "
+ "INSERT INTO tranche_ref(Cusip, ISIN, figi, bloomberg_ticker, dealname, "
"tranche, paripassu_tranches, Orig_Balance, Orig_Moody, Orig_Attach, "
"Orig_Detach, Floater_Index, Spread, type) VALUES({0}) "
- "RETURNING cusip_id".format(",".join(["%s"] * 13))
+ "RETURNING id".format(",".join(["%s"] * 14))
)
with conn.cursor() as c:
try:
c.execute(sqlstr, to_insert)
- (cusip_id,) = c.fetchone()
- return cusip_id
+ (tranche_id,) = c.fetchone()
+ return tranche_id
except psycopg2.DataError as e:
logger.error(e)
conn.commit()
-def upload_cusip_data(conn, filename: pathlib.Path):
+def upload_tranche_data(conn, filename: pathlib.Path):
dealupdate = {}
with filename.open("r") as fh:
dr = csv.DictReader(fh, dialect="excel-tab")
@@ -90,20 +91,20 @@ def upload_cusip_data(conn, filename: pathlib.Path):
except TypeError:
logging.error(f"deal:{dealname} not in database")
continue
- sqlstring = "SELECT cusip_id FROM cusip_ref WHERE cusip=%s and dealname=%s"
+ sqlstring = "SELECT id FROM tranche_ref WHERE figi=%s and dealname=%s"
with conn.cursor() as c:
- c.execute(sqlstring, (line["CUSIP"], dealname))
+ c.execute(sqlstring, (line["FIGI"], dealname))
r = c.fetchone()
if r is None:
try:
- cusip_id = insert_new_cusip(conn, line)
+ tranche_id = insert_new_tranche(conn, line)
except ValueError:
continue
else:
- cusip_id = r[0]
- sqlstring = "SELECT max(updatedate) FROM cusip_update WHERE cusip_id = %s"
+ tranche_id = r[0]
+ sqlstring = "SELECT max(updatedate) FROM tranche_update WHERE id = %s"
with conn.cursor() as c:
- c.execute(sqlstring, (cusip_id,))
+ c.execute(sqlstring, (tranche_id,))
(curr_date,) = c.fetchone()
if curr_date is None or curr_date < dealupdate[dealname]:
try:
@@ -120,11 +121,11 @@ def upload_cusip_data(conn, filename: pathlib.Path):
except ValueError:
continue
line["Curr Moody"] = line.get("Curr Moody") or line.get("Orig Moody")
- sqlstring = "INSERT INTO cusip_update VALUES({0})".format(
+ sqlstring = "INSERT INTO tranche_update VALUES({0})".format(
",".join(["%s"] * 8)
)
to_insert = (
- cusip_id,
+ tranche_id,
line["Curr Balance"],
line["Factor"],
line["Coupon"],
@@ -138,7 +139,7 @@ def upload_cusip_data(conn, filename: pathlib.Path):
c.execute(sqlstring, to_insert)
except (psycopg2.DataError, psycopg2.IntegrityError) as e:
logger.error(e)
- logger.debug("uploaded: {0}".format(line["CUSIP"]))
+ logger.debug("uploaded: {0}".format(line["FIGI"]))
conn.commit()
@@ -271,7 +272,7 @@ if __name__ == "__main__":
parser.add_argument("workdate", nargs="?", default=str(datetime.date.today()))
args = parser.parse_args()
- cusip_files = [
+ tranche_files = [
f for f in (DATA_DIR / "Indicative_").iterdir() if "TrInfo" in f.name
]
deal_files = [
@@ -283,5 +284,5 @@ if __name__ == "__main__":
for deal in deal_files:
upload_deal_data(etdb, deal)
# then load tranche data
- for cusip in cusip_files:
- upload_cusip_data(etdb, cusip)
+ for tranche in tranche_files:
+        upload_tranche_data(etdb, tranche)
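
For context, the renamed helpers follow a FIGI-keyed lookup-or-insert against tranche_ref. The sketch below is illustrative only: the table and column names (tranche_ref, figi, dealname, id) are taken from the queries in this diff, while the helper name, the trimmed column list, and the commit handling are assumptions rather than part of the change.

# Sketch only: tranche_ref/figi/dealname/id come from the diff above;
# get_or_create_tranche_id and its reduced column list are assumptions.
import logging

import psycopg2

logger = logging.getLogger(__name__)


def get_or_create_tranche_id(conn, figi, dealname, cusip=None, isin=None):
    """Return tranche_ref.id for (figi, dealname), inserting a row if missing."""
    with conn.cursor() as c:
        c.execute(
            "SELECT id FROM tranche_ref WHERE figi=%s AND dealname=%s",
            (figi, dealname),
        )
        row = c.fetchone()
        if row is not None:
            return row[0]
        try:
            c.execute(
                "INSERT INTO tranche_ref(cusip, isin, figi, dealname) "
                "VALUES (%s, %s, %s, %s) RETURNING id",
                (cusip, isin, figi, dealname),
            )
            (tranche_id,) = c.fetchone()
            return tranche_id
        except psycopg2.DataError as e:
            logger.error(e)
            return None
        finally:
            conn.commit()
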
diff --git a/python/intex/load_intex_collateral.py b/python/intex/load_intex_collateral.py
index 8e1c3253..78767aa5 100644
--- a/python/intex/load_intex_collateral.py
+++ b/python/intex/load_intex_collateral.py
@@ -4,7 +4,7 @@ import psycopg2
from .common import sanitize_float
from serenitas.utils.env import DATA_DIR
import uuid
-from .load_indicative import upload_cusip_data, upload_deal_data
+from .load_indicative import upload_tranche_data, upload_deal_data
import logging
logger = logging.getLogger(__name__)
@@ -23,6 +23,7 @@ fields = [
"Second Lien",
"LoanX ID",
"CUSIP",
+ "FIGI",
"Market Price",
"Market Price Source",
"Market Price Date",
@@ -56,6 +57,7 @@ def upload_data(conn, workdate):
"SecondLien",
"LoanXID",
"Cusip",
+ "figi",
"IntexPrice",
"IntexPriceSource",
"IntexPriceDate",
@@ -213,13 +215,13 @@ def upload_data(conn, workdate):
def intex_data(conn, workdate):
basedir = DATA_DIR / ("Indicative_" + workdate)
- cusip_files = [f for f in basedir.iterdir() if "TrInfo" in f.name]
+ tranche_files = [f for f in basedir.iterdir() if "TrInfo" in f.name]
deal_files = [f for f in basedir.iterdir() if "TrInfo" not in f.name]
# first load deal data
for deal_file in deal_files:
upload_deal_data(conn, deal_file)
# then load tranche data
- for cusip_file in cusip_files:
- upload_cusip_data(conn, cusip_file)
+ for tranche_file in tranche_files:
+ upload_tranche_data(conn, tranche_file)
upload_data(conn, workdate)
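
Assuming intex_data remains the top-level entry point, a minimal driver might look like the sketch below; the import path, the DSN environment variable, and the connection handling are assumptions, not part of this diff.

# Illustrative driver for the renamed entry points; import path and DSN
# source are assumptions.
import datetime
import os

import psycopg2

from intex.load_intex_collateral import intex_data

if __name__ == "__main__":
    workdate = str(datetime.date.today())
    conn = psycopg2.connect(os.environ["INTEX_DB_DSN"])
    try:
        # loads deal files, then tranche files, then collateral
        intex_data(conn, workdate)
    finally:
        conn.close()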