-rw-r--r--  python/load_intex_collateral.py  29  +++++++++++++++++++----------
1 file changed, 19 insertions(+), 10 deletions(-)
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
index 7da4b13d..02018762 100644
--- a/python/load_intex_collateral.py
+++ b/python/load_intex_collateral.py
@@ -6,6 +6,7 @@ from db import conn, query_db
 import sys
 import uuid
 from load_indicative import upload_cusip_data, upload_deal_data
+import logging
 
 fields = ['Asset Name', 'Issuer', 'Contributed Balance', 'Maturity Date', \
           'Asset Subtype', 'Asset Type', 'Gross Coupon', 'Spread', \
@@ -94,17 +95,22 @@ def upload_data(conn, dealnames, workdate, logger=None):
             r[0] = r[0] + str(uuid.uuid4())[:3]
             data[r[0]] = r[1:]
         fh.close()
-
-        sqlstr = "select distinct(updatedate) from et_collateral where dealname= %s"
-        old_update_dates = [date[0] for date in query_db(sqlstr, params=(dealname,), one=False)]
-
-        sqlstr = "select max(\"Latest Update\") from clo_universe where dealname= %s and \"Latest Update\"<=%s"
-        updatedate = query_db(sqlstr, params = (dealname, workdate))[0]
+        sqlstr1 = "select distinct(updatedate) from et_collateral where dealname= %s"
+        sqlstr2 = "select max(\"Latest Update\") from clo_universe where dealname= %s and \"Latest Update\"<=%s"
+        with conn.cursor() as c:
+            c.execute(sqlstr1, (dealname,))
+            old_update_dates = [date[0] for date in c]
+            c.execute(sqlstr2, (dealname, workdate))
+            updatedate = c.fetchone()[0]
+        conn.commit()
         # sanity check if we already have the data
         reinsert = False
         if updatedate in old_update_dates:
             sqlstr = "SELECT count(*) FROM et_collateral where dealname = %s and updatedate= %s"
-            currlen = query_db(sqlstr, params = (dealname, updatedate))[0]
+            with conn.cursor() as c:
+                c.execute(sqlstr, (dealname, updatedate))
+                currlen = c.fetchone()[0]
+            conn.commit()
             if currlen != len(data): #then we delete and just reupload
                 print("{0} has {1} rows in the database and current collateral file has {2}".format(dealname, currlen, len(data)))
                 with conn.cursor() as c:
@@ -126,7 +132,10 @@ def upload_data(conn, dealnames, workdate, logger=None):
            try:
                c.executemany(sqlstr, [(dealname, updatedate, k) + tuple(v) for k, v in data.items()])
            except (psycopg2.DataError, TypeError) as detail:
-                print(detail)
+                if logger:
+                    logger.info(detail)
+                else:
+                    print(detail)
                pdb.set_trace()
        conn.commit()
 
@@ -139,10 +148,10 @@ def intex_data(conn, workdate, logger=None):
     deal_files = [f for f in files if "TrInfo" not in f]
     #first load deal data
     for deal_file in deal_files:
-        upload_deal_data(deal_file)
+        upload_deal_data(conn, deal_file)
     #then load tranche data
     for cusip_file in cusip_files:
-        upload_cusip_data(cusip_file)
+        upload_cusip_data(conn, cusip_file, logger)
     upload_data(conn, dealnames, workdate, logger)
 
 if __name__ == "__main__":
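
For readers wiring this up: a minimal sketch of how the logger threaded through intex_data and upload_data might be supplied by a caller. The handler configuration, logger name, and workdate value below are illustrative assumptions; the commit itself only adds "import logging" and the logger parameter plumbing.

    import logging
    from datetime import date

    from db import conn                        # same connection object this module imports
    from load_intex_collateral import intex_data

    # Assumed handler setup; not shown anywhere in this commit.
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s %(levelname)s %(message)s")
    logger = logging.getLogger("load_intex_collateral")

    # With logger=None the code falls back to print(), as in the except
    # branch above. Passing workdate as an ISO date string is an assumption.
    intex_data(conn, date.today().isoformat(), logger=logger)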