-rw-r--r--  python/load_indicative.py  41
1 file changed, 22 insertions, 19 deletions
diff --git a/python/load_indicative.py b/python/load_indicative.py
index ebe46699..85f02e47 100644
--- a/python/load_indicative.py
+++ b/python/load_indicative.py
@@ -6,13 +6,12 @@ import csv
 import pdb
 import sys
 from common import root, sanitize_float
-from db import conn, query_db, with_connection
+from db import conn
 
 def convertToNone(s):
     return None if s in ["", "-", "NR"] else s
 
-@with_connection
-def upload_cusip_data(conn, filename):
+def upload_cusip_data(conn, filename, logger=None):
     dealupdate = {}
     with open( filename, "r") as fh:
         dr = csv.DictReader(fh, dialect='excel-tab')
@@ -30,12 +29,16 @@ def upload_cusip_data(conn, filename):
 
             dealname = line['dealname']
             line = {k: convertToNone(v) for k, v in line.items()}
             if dealname not in dealupdate:
-                dealupdate[dealname] = query_db("SELECT \"Latest Update\" FROM clo_universe " \
-                                                "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
-                                                params = (dealname,))[0]
+                with conn.cursor() as c:
+                    c.execute("SELECT \"Latest Update\" FROM clo_universe " \
+                              "WHERE dealname = %s ORDER BY \"Latest Update\" DESC", (dealname,))
+                    dealupdate[dealname] = c.fetchone()[0]
             sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
-            curr_date = query_db(sqlstring, params = (line['CUSIP'],))
+            with conn.cursor() as c:
+                c.execute(sqlstring, (line['CUSIP'],))
+                curr_date = c.fetchone()
+            conn.commit()
             if not curr_date or curr_date[0] < dealupdate[dealname]:
                 if dealname not in deals_to_update:
                     deals_to_update.append(dealname)
@@ -65,27 +68,27 @@ def upload_cusip_data(conn, filename):
             try:
                 with conn.cursor() as c:
                     c.execute(sqlstring, line)
-            except psycopg2.DataError:
-                pdb.set_trace()
+            except psycopg2.DataError as e:
+                logger.info(e)
             print("uploaded: {0}".format(line['CUSIP']))
         conn.commit()
-
     for dealname in deals_to_update:
-        data = query_db("SELECT p_cusip, p_curr_subordination, "\
-                        "p_curr_thickness from et_deal_subordination(%s)",
-                        params = (dealname,),
-                        one = False)
-        data = [ (t[1], t[2], t[0], dealupdate[dealname]) for t in data]
         with conn.cursor() as c:
+            c.execute("SELECT p_cusip, p_curr_subordination, "\
+                      "p_curr_thickness from et_deal_subordination(%s)",
+                      (dealname,))
+            data = [ (t[1], t[2], t[0], dealupdate[dealname]) for t in c]
             c.executemany("UPDATE cusip_universe SET subordination = %s, "
                           "thickness = %s WHERE cusip = %s AND "
                           "updatedate = %s", data)
     conn.commit()
 
-@with_connection
 def upload_deal_data(conn, filename):
     sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
-    deallist = dict(query_db(sqlstr, one=False))
+    with conn.cursor() as c:
+        c.execute(sqlstr)
+        deallist = dict(c)
+    conn.commit()
     with open( filename, "r") as fh:
         dr = csv.DictReader(fh, dialect='excel-tab')
         data = []
@@ -152,8 +155,8 @@ if __name__=="__main__":
 
     #first load deal data
     for deal in deal_files:
-        upload_deal_data(deal)
+        upload_deal_data(conn, deal)
     #then load tranche data
     for cusip in cusip_files:
-        upload_cusip_data(cusip)
+        upload_cusip_data(conn, cusip)
     conn.close()