Diffstat (limited to 'python/intex/load_indicative.py')
-rw-r--r--  python/intex/load_indicative.py  187
1 file changed, 187 insertions(+), 0 deletions(-)
diff --git a/python/intex/load_indicative.py b/python/intex/load_indicative.py
new file mode 100644
index 00000000..8fd75b6e
--- /dev/null
+++ b/python/intex/load_indicative.py
@@ -0,0 +1,187 @@
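+"""Load indicative deal and tranche data into the database.
+
+Reads the tab-delimited files in <root>/data/Indicative_<workdate>/ and
+populates the deal_indicative, clo_universe, and cusip_universe tables.
+"""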
+import psycopg2
+import os
+import datetime
+from datetime import date
+import csv, sys, re
+import pdb
+from common import root, sanitize_float
+from db import conn
+import logging
+
+def convertToNone(s):
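+    """Map placeholder values ("", "-", "NR") to None."""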
+ return None if s in ["", "-", "NR"] else s
+
+def upload_cusip_data(conn, filename):
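+    """Parse a tab-delimited tranche (TrInfo) file and insert one cusip_universe
+    row per CUSIP, stamped with the deal's latest update date."""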
+ dealupdate = {}
+    with open(filename, "r") as fh:
+ dr = csv.DictReader(fh, dialect='excel-tab')
+ data = []
+ deals_to_update = []
+
+ for line in dr:
+ if "ISIN" not in line:
+ line['ISIN'] = None
+ sp = line["Tranche"].split(",")
+ if len(sp)==2:
+ line["dealname"], line["tranche"] = sp
+ else:
+ continue
+ line["dealname"] = line["dealname"].lower()
+ dealname = line['dealname']
+ line = {k: convertToNone(v) for k, v in line.items()}
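+            # look up (and cache) the latest "Latest Update" date recorded for this deal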
+ if dealname not in dealupdate:
+ with conn.cursor() as c:
+ c.execute("SELECT \"Latest Update\" FROM clo_universe " \
+ "WHERE dealname = %s ORDER BY \"Latest Update\" DESC", (dealname,))
+ dealupdate[dealname] = c.fetchone()[0]
+
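+            # check the update date already stored for this CUSIP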
+ sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
+ with conn.cursor() as c:
+ c.execute(sqlstring, (line['CUSIP'],))
+ curr_date = c.fetchone()
+ conn.commit()
+ if not curr_date or curr_date[0] < dealupdate[dealname]:
+ if dealname not in deals_to_update:
+ deals_to_update.append(dealname)
+ line['updatedate'] = dealupdate[dealname]
+ try:
+ for key in ['Curr Balance', 'Orig Balance', 'Orig Attachment Point',
+ 'Curr Attachment Point (def at MV)', 'Orig Detachment Point',
+ 'Curr Detachment Point (def at MV)', 'Factor', 'Coupon',
+ 'Floater Spread/Margin']:
+ if line[key]:
+ line[key] = sanitize_float(line[key])
+ line[key] = convertToNone(line[key])
+ except ValueError:
+ continue
+ line['Curr Attachment Point'] = line['Curr Attachment Point (def at MV)']
+ line['Curr Detachment Point'] = line['Curr Detachment Point (def at MV)']
+ if "Curr Moody" not in line:
+ if 'Orig Moody' in line:
+ line['Curr Moody'] = line['Orig Moody']
+ else:
+ line['Curr Moody'] = None
+ line['Orig Moody'] = None
+ sqlstring = "INSERT INTO cusip_universe(Cusip, ISIN, \"Bloomberg Ticker\", dealname, tranche, " \
+ "Coupon, Orig_Balance, Curr_Balance, Factor, Orig_Moody, Curr_Moody, " \
+ "Orig_Attach, Orig_Detach, Curr_Attach, Curr_Detach, Floater_Index, " \
+ "Spread, updatedate) " \
+ "VALUES(%(CUSIP)s, %(ISIN)s, %(Bloomberg Ticker)s, %(dealname)s, %(tranche)s, %(Coupon)s, " \
+ "%(Orig Balance)s, %(Curr Balance)s, %(Factor)s, %(Orig Moody)s, %(Curr Moody)s, " \
+ "%(Orig Attachment Point)s, %(Orig Detachment Point)s, "\
+ "%(Curr Attachment Point)s, %(Curr Detachment Point)s, " \
+ "%(Floater Index)s, %(Floater Spread/Margin)s, %(updatedate)s)"
+ try:
+ with conn.cursor() as c:
+ c.execute(sqlstring, line)
+            except psycopg2.DataError as e:
+                logging.error(e)
+            else:
+                logging.debug("uploaded: {0}".format(line['CUSIP']))
+ conn.commit()
+ # for dealname in deals_to_update:
+ # with conn.cursor() as c:
+ # c.execute("SELECT p_cusip, p_curr_subordination, "\
+ # "p_curr_thickness from et_deal_subordination(%s)",
+ # (dealname,))
+ # data = [ (t[1], t[2], t[0], dealupdate[dealname]) for t in c]
+ # c.executemany("UPDATE cusip_universe SET subordination = %s, "
+ # "thickness = %s WHERE cusip = %s AND "
+ # "updatedate = %s", data)
+ # conn.commit()
+
+def upload_deal_data(conn, filename):
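+    """Parse a tab-delimited deal-level file: insert static deal attributes into
+    deal_indicative for deals not yet loaded, and add a clo_universe row for
+    each new "Latest Update" date."""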
+ sqlstr = "select dealname, array_agg(\"Latest Update\") from clo_universe group by dealname"
+ with conn.cursor() as c:
+ c.execute(sqlstr)
+ deallist1 = dict(c)
+ sqlstr = "select dealname from deal_indicative"
+ with conn.cursor() as c:
+ c.execute(sqlstr)
+ deallist2 = [d[0] for d in c]
+ conn.commit()
+    with open(filename, "r") as fh:
+ dr = csv.DictReader(fh, dialect='excel-tab')
+ data = []
+ for line in dr:
+ if not line['Deal Name, Tranche Name'] or (line['Deal Name, Tranche Name'] == 'Unknown Security'):
+ continue
+ if not line['Latest Update']:
+ continue
+ for key in line.keys():
+ line[key] = convertToNone(line[key])
+ line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
+ line["Paid Down"] = None
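+            # paid-down deals report "Paid Down: <date>" in the Latest Update column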
+ if "Paid Down" in line["Latest Update"]:
+ line["Paid Down"] = re.sub("Paid Down: ","", line["Latest Update"])
+ line["Latest Update"] = line["Paid Down"]
+ for field in ["Deal Issue Date", "Deal Termination Date", "Reinv End Date", \
+ "Latest Update", "Pay Day", "Deal First Pay Date", "Paid Down"]:
+ if line[field]:
+ try:
+ line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
+ except ValueError:
+ pdb.set_trace()
+ if line["Pay Day"]:
+ line["Pay Day"] = line["Pay Day"].day
+ for key in ["Collection Account Principal Balance", "Collection Account Interest Balance",
+ "Curr Deal Bal", "Tranche Curr Bal",
+ "CDO Pct of Assets that are Structured Finance Obligations",
+ "CDO Defaulted Security Balance (Reported)"]:
+ if line[key]:
+ line[key] = sanitize_float(line[key])
+ line['Deal/Tranche ID'] = line['Deal/Tranche ID'].lower()
+ dealname = line['Deal/Tranche ID']
+ line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
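+            # static deal attributes go into deal_indicative only the first time a deal is seen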
+ if dealname not in deallist2:
+ for key in ["Orig Deal Bal", "Tranche Orig Bal"]:
+ if line[key]:
+ line[key] = sanitize_float(line[key])
+ if line['Deal CUSIP List']:
+ line['Deal CUSIP List'] = line['Deal CUSIP List'].split(",")
+ sqlstr = "INSERT INTO deal_indicative VALUES( %(Deal/Tranche ID)s, %(Deal Name)s, " \
+ "%(Collateral Manager)s, %(Deal Issue Date)s, %(Deal Termination Date)s, " \
+ "%(Pay Day)s, %(Reinv End Date)s, %(Deal First Pay Date)s, %(Orig Deal Bal)s, " \
+ "%(Tranche Orig Bal)s, %(Deal CUSIP List)s, %(Paid Down)s)"
+ try:
+ with conn.cursor() as c:
+ c.execute(sqlstr, line)
+ except (psycopg2.DataError, KeyError) as detail:
+ logging.error(detail)
+ pdb.set_trace()
+ with conn.cursor() as c:
+ if line['Paid Down']:
+ c.execute("UPDATE deal_indicative SET paid_down=%s WHERE dealname=%s",
+ (line['Paid Down'], dealname))
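+            # add a clo_universe row only if this update date has not been loaded for the deal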
+ if dealname not in deallist1 or line['Latest Update'] not in deallist1[dealname]:
+ sqlstring = \
+ "INSERT INTO clo_universe " \
+ "VALUES (%(Deal/Tranche ID)s, %(Curr Deal Bal)s, %(Tranche Curr Bal)s, " \
+ "%(Tranche Factor)s, %(Collection Account Principal Balance)s, " \
+ "%(Collection Account Interest Balance)s, %(CDOpercent)s, %(defaultedbal)s, " \
+ "%(Coupon)s, %(Latest Update)s)"
+ try:
+ with conn.cursor() as c:
+ c.execute(sqlstring, line)
+ deallist1[dealname] = [line['Latest Update']]
+ except (psycopg2.DataError, KeyError) as detail:
+ logging.error(detail)
+ pdb.set_trace()
+ conn.commit()
+
+if __name__=="__main__":
+ if len(sys.argv) > 1:
+ workdate = sys.argv[1]
+ else:
+ workdate = str(datetime.date.today())
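+    # input files live in <root>/data/Indicative_<workdate>/; "TrInfo" files hold tranche data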
+ files = [os.path.join(root, "data", "Indicative_" + workdate, f) for f in
+ os.listdir(os.path.join(root, "data", "Indicative_" + workdate))]
+ cusip_files = [f for f in files if "TrInfo" in f]
+ deal_files = [f for f in files if "TrInfo" not in f]
+
+ #first load deal data
+ for deal in deal_files:
+ upload_deal_data(conn, deal)
+ #then load tranche data
+ for cusip in cusip_files:
+ upload_cusip_data(conn, cusip)
+ conn.close()