Diffstat (limited to 'python/load_indicative.py')
| -rw-r--r-- | python/load_indicative.py | 33 |
1 file changed, 17 insertions, 16 deletions
diff --git a/python/load_indicative.py b/python/load_indicative.py
index 807c73a3..03dad396 100644
--- a/python/load_indicative.py
+++ b/python/load_indicative.py
@@ -6,8 +6,8 @@ from datetime import date
 import csv
 import pdb
 import sys
-import common
-from common import query_db
+from common import root
+from db import conn, query_db, with_connection
 
 def convertToNone(s):
     return None if s=="" or s=="-" or s=="NR" else s
@@ -22,7 +22,8 @@ def sanitize_float(intex_float):
         intex_float = float(intex_float)
     return intex_float
 
-def upload_cusip_data(filename, conn):
+@with_connection
+def upload_cusip_data(conn, filename):
     dealupdate = {}
     with open( filename, "r") as fh:
         dr = csv.DictReader(fh, dialect='excel-tab')
@@ -40,12 +41,12 @@ def upload_cusip_data(filename, conn):
             dealname = line['dealname']
             line = {k: convertToNone(v) for k, v in line.items()}
             if dealname not in dealupdate:
-                dealupdate[dealname] = query_db(conn, "SELECT \"Latest Update\" FROM clo_universe " \
+                dealupdate[dealname] = query_db("SELECT \"Latest Update\" FROM clo_universe " \
                         "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
                         params = (dealname,))[0]
 
             sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
-            curr_date = query_db(conn, sqlstring, params = (line['CUSIP'],))
+            curr_date = query_db(sqlstring, params = (line['CUSIP'],))
             if not curr_date or curr_date[0] < dealupdate[dealname]:
                 if dealname not in deals_to_update:
                     deals_to_update.append(dealname)
@@ -80,7 +81,7 @@ def upload_cusip_data(filename, conn):
     conn.commit()
 
     for dealname in deals_to_update:
-        data = query_db(conn, "SELECT p_cusip, p_curr_subordination, "\
+        data = query_db("SELECT p_cusip, p_curr_subordination, "\
                 "p_curr_thickness from et_deal_subordination(%s)",
                 params = (dealname,),
                 one = False)
@@ -91,13 +92,13 @@ def upload_cusip_data(filename, conn):
                 "updatedate = %s", data)
     conn.commit()
 
-def upload_deal_data(filename, conn):
+@with_connection
+def upload_deal_data(conn, filename):
     sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
-    deallist = dict(query_db(conn, sqlstr, one=False))
+    deallist = dict(query_db(sqlstr, one=False))
     with open( filename, "r") as fh:
         dr = csv.DictReader(fh, dialect='excel-tab')
         data = []
-        c = conn.cursor()
         for line in dr:
             if not line ['Deal Name'] or (line['Deal Name'] == 'Unknown Security'):
                 continue
@@ -153,15 +154,15 @@ if __name__=="__main__":
         workdate = sys.argv[1]
     else:
         workdate = str(datetime.date.today())
-    files = [os.path.join(common.root, "data", "Indicative_" + workdate, f) for f in
-             os.listdir(os.path.join(common.root, "data", "Indicative_" + workdate))]
+    files = [os.path.join(root, "data", "Indicative_" + workdate, f) for f in
+             os.listdir(os.path.join(root, "data", "Indicative_" + workdate))]
     cusip_files = [f for f in files if "TrInfo" in f]
    deal_files = [f for f in files if "TrInfo" not in f]
 
     #first load deal data
-    for f in deal_files:
-        upload_deal_data(f, common.conn)
+    for deal in deal_files:
+        upload_deal_data(deal)
     #then load tranche data
-    for f in cusip_files:
-        upload_cusip_data(f, common.conn)
-    common.conn.close()
+    for cusip in cusip_files:
+        upload_cusip_data(cusip)
+    conn.close()
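
For context, the call sites in this diff assume that db.with_connection injects the module-level connection as the wrapped function's first argument and that db.query_db runs against that same connection. db.py is not part of this commit, so the following is only a minimal sketch of what such a module might look like, assuming psycopg2 and a placeholder connection string; the real implementation may differ.

# Hypothetical sketch of db.py -- not part of this commit. Assumes psycopg2
# and a single module-level connection; the DSN below is a placeholder.
import functools
import psycopg2

conn = psycopg2.connect("dbname=clo")  # assumed connection string

def with_connection(func):
    """Pass the shared connection as the first positional argument."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(conn, *args, **kwargs)
    return wrapper

def query_db(sql, params=None, one=True):
    """Execute sql on the shared connection; return one row or all rows."""
    with conn.cursor() as cur:
        cur.execute(sql, params)
        return cur.fetchone() if one else cur.fetchall()

With a decorator along these lines, the __main__ block can call upload_deal_data(deal) and upload_cusip_data(cusip) without threading the connection through explicitly, which is the shape of the change above.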
