aboutsummaryrefslogtreecommitdiffstats
path: root/python/load_indicative.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/load_indicative.py')
-rw-r--r--python/load_indicative.py164
1 files changed, 164 insertions, 0 deletions
diff --git a/python/load_indicative.py b/python/load_indicative.py
new file mode 100644
index 00000000..2ec69385
--- /dev/null
+++ b/python/load_indicative.py
@@ -0,0 +1,164 @@
import csv
import datetime
from datetime import date
import os
import os.path
import pdb
import re
import sys

import psycopg2

import common
from common import query_db
+
def convertToNone(s):
    """Map placeholder values ("", "-", "NR") to None; pass anything else through unchanged."""
    return None if s in ("", "-", "NR") else s
+
def sanitize_float(intex_float):
    """Parse an Intex-formatted numeric string into a float.

    Thousands-separator commas are stripped and accounting-style
    parentheses denote a negative value, e.g. "(1,000)" -> -1000.0.

    Raises ValueError when the string contains more than one decimal
    point, or when the remainder is not a valid float literal.
    """
    # More than one '.' can never be a valid number; reject early.
    if intex_float.count(".") > 1:
        raise ValueError
    cleaned = intex_float.replace(",", "")
    if "(" in cleaned:
        # Accounting notation: "(123.45)" means -123.45.
        return -float(cleaned[1:-1])
    return float(cleaned)
+
def upload_cusip_data(filename, conn):
    """Load tranche-level (CUSIP) indicative data from a tab-delimited export
    into cusip_universe, then refresh subordination/thickness for every deal
    that received new rows.

    filename -- path to a tab-separated file with one row per tranche.
    conn     -- open psycopg2 connection; committed after the inserts and
                again after the subordination updates.
    """
    # Cache: dealname -> newest "Latest Update" row from clo_universe, so
    # each deal hits the database at most once per file.
    dealupdate = {}
    with open( filename, "r") as fh:
        dr = csv.DictReader(fh, dialect='excel-tab')
        data = []
        deals_to_update = []  # deals whose tranche rows were (re)inserted
        c = conn.cursor()
        for line in dr:
            if "ISIN" not in line:
                line['ISIN'] = None
            # "Tranche" is expected as "dealname,tranche"; rows without a
            # comma are not tranche rows and are skipped.
            if "," in line['Tranche']:
                line["dealname"], line["tranche"] = line["Tranche"].split(",")
            else:
                continue
            line["dealname"] = line["dealname"].lower()
            dealname = line['dealname']
            # Normalize placeholder values ("", "-", "NR") to None/NULL.
            line = {k: convertToNone(v) for k, v in line.items()}
            if dealname not in dealupdate:
                # NOTE(review): the trailing [0] raises IndexError when the
                # deal is absent from clo_universe -- assumes deal-level data
                # was loaded first (see __main__ ordering).
                dealupdate[dealname] = query_db("SELECT \"Latest Update\" FROM clo_universe " \
                    "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
                    params = (dealname,))[0]

            # Insert only when this CUSIP is new, or its stored row is older
            # than the deal's latest update date.
            sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
            curr_date = query_db(sqlstring, params = (line['CUSIP'],))
            if not curr_date or curr_date[0] < dealupdate[dealname]:
                if dealname not in deals_to_update:
                    deals_to_update.append(dealname)
                line['updatedate'] = dealupdate[dealname]
                try:
                    # Numeric columns arrive as formatted strings (commas,
                    # accounting parentheses); any malformed value skips the
                    # entire row.
                    for key in ['Curr Balance', 'Orig Balance', 'Orig Attachment Point',
                                'Curr Attachment Point', 'Orig Detachment Point',
                                'Curr Detachment Point', 'Factor', 'Coupon', 'Floater Spread']:
                        if line[key]:
                            line[key] = sanitize_float(line[key])
                            line[key] = convertToNone(line[key])
                except ValueError:
                    continue

                # Fall back to the original rating when no current one exists.
                if "Curr Moody" not in line:
                    line['Curr Moody'] = line['Orig Moody']
                sqlstring = "INSERT INTO cusip_universe(Cusip, ISIN, dealname, tranche, " \
                    "Coupon, Orig_Balance, Curr_Balance, Factor, Orig_Moody, Curr_Moody, " \
                    "Orig_Attach, Orig_Detach, Curr_Attach, Curr_Detach, Floater_Index, " \
                    "Spread, updatedate)" \
                    "VALUES (%(CUSIP)s, %(ISIN)s, %(dealname)s, %(tranche)s, %(Coupon)s, " \
                    "%(Orig Balance)s, %(Curr Balance)s, %(Factor)s, %(Orig Moody)s, %(Curr Moody)s, " \
                    "%(Orig Attachment Point)s, %(Orig Detachment Point)s, %(Curr Attachment Point)s," \
                    "%(Curr Detachment Point)s, %(Floater Index)s, %(Floater Spread)s, " \
                    "%(updatedate)s)"
                try:
                    c.execute(sqlstring, line)
                except psycopg2.DataError:
                    # NOTE(review): debugger breakpoint left in for bad-data
                    # triage; fires interactively on a data error.
                    pdb.set_trace()
                print("uploaded: {0}".format(line['CUSIP']))
        conn.commit()

        # Recompute subordination/thickness for each touched deal via the
        # et_deal_subordination database function.
        for dealname in deals_to_update:
            data = query_db("SELECT p_cusip, p_curr_subordination, "\
                "p_curr_thickness from et_deal_subordination(%s)",
                params = (dealname,),
                one = False)
            # Reorder to (subordination, thickness, cusip, updatedate) to
            # match the UPDATE's placeholder order.
            data = [ (t[1], t[2], t[0], dealupdate[dealname]) for t in data]
            c.executemany("UPDATE cusip_universe SET subordination = %s, "
                "thickness = %s WHERE cusip = %s AND "
                "updatedate = %s", data)
        conn.commit()
+
def upload_deal_data(filename, conn):
    """Load deal-level indicative data from a tab-delimited export into
    clo_universe, inserting only deals that are new or carry a newer
    "Latest Update" than what is already stored.

    filename -- path to a tab-separated file with one row per deal.
    conn     -- open psycopg2 connection; committed once at the end.
    """
    # dealname -> latest stored update date, used to suppress stale rows.
    sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
    deallist = dict(query_db(sqlstr, one=False))
    with open( filename, "r") as fh:
        dr = csv.DictReader(fh, dialect='excel-tab')
        data = []
        c = conn.cursor()
        for line in dr:
            # Skip unnamed or unknown securities.
            if not line ['Deal Name'] or (line['Deal Name'] == 'Unknown Security'):
                continue
            # An empty update date marks the end of useful rows -- presumably
            # a trailing footer in the export; stop reading the file.
            if not line['Latest Update']:
                break
            # Normalize placeholder values ("", "-", "NR") to None/NULL.
            for key in line.keys():
                line[key] = convertToNone(line[key])
            line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
            # "Latest Update" may read "Paid Down: <date>"; record the
            # paid-down date separately and keep the bare date as the
            # update date.
            line["Paid Down"] = None
            if "Paid Down" in line["Latest Update"]:
                line["Paid Down"] = re.sub("Paid Down: ","", line["Latest Update"])
                line["Latest Update"] = line["Paid Down"]
            # Date columns arrive formatted like "Jan 01, 2015".
            for field in ["Deal Issue Date", "Deal Termination Date", \
                          "Deal Next Pay Date", "Reinv End Date", "Latest Update"]:
                if line[field]:
                    try:
                        line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
                    except ValueError:
                        # NOTE(review): debugger breakpoint left in for
                        # bad-date triage; fires interactively.
                        pdb.set_trace()
            # Numeric columns arrive as formatted strings (commas, parens).
            for key in ["Collection Account Principal Balance", "Collection Account Interest Balance",
                        "Orig Deal Bal", "Curr Deal Bal", "Tranche Curr Bal", "Tranche Orig Bal",
                        "CDO Pct of Assets that are Structured Finance Obligations",
                        "CDO Defaulted Security Balance (Reported)"]:
                if line[key]:
                    line[key] = sanitize_float(line[key])
            dealname = line['Deal,Tr/CUSIP/ISIN']
            line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
            # Split the comma-joined CUSIP list -- presumably stored as an
            # array column; verify against the clo_universe schema.
            line['Deal CUSIP List'] = line['Deal CUSIP List'].split(",")
            if dealname not in deallist or line['Latest Update'] > deallist[dealname]:
                sqlstring = \
                    "INSERT INTO clo_universe " \
                    "VALUES (%(Deal,Tr/CUSIP/ISIN)s, %(Deal Name)s, %(Collateral Manager)s, %(Orig Deal Bal)s," \
                    "%(Curr Deal Bal)s, %(Tranche Orig Bal)s, %(Tranche Curr Bal)s, %(Tranche Factor)s," \
                    "%(Collection Account Principal Balance)s," \
                    "%(Collection Account Interest Balance)s, %(CDOpercent)s, %(defaultedbal)s, " \
                    "%(Coupon)s, %(Deal Issue Date)s," \
                    "%(Deal Termination Date)s, %(Deal Next Pay Date)s," \
                    "%(Reinv End Date)s, %(Latest Update)s, %(Deal CUSIP List)s, %(Paid Down)s)"
                try:
                    c.execute(sqlstring, line)
                    # Remember the newly stored date so later rows in the
                    # same file for this deal are compared against it.
                    deallist[dealname] = line['Latest Update']
                except psycopg2.DataError as detail:
                    print(detail)
                    # NOTE(review): debugger breakpoint left in for triage.
                    pdb.set_trace()
                except KeyError as detail:
                    print(detail)
                    pdb.set_trace()
        conn.commit()
+
+if __name__=="__main__":
+ if len(sys.argv) > 1:
+ workdate = sys.argv[1]
+ else:
+ workdate = str(datetime.date.today())
+ files = [os.path.join(common.root, "data", "Indicative_" + workdate, f) for f in
+ os.listdir(os.path.join(common.root, "data", "Indicative_" + workdate))]
+ cusip_files = [f for f in files if "TrInfo" in f]
+ deal_files = [f for f in files if "TrInfo" not in f]
+
+ #first load deal data
+ for f in deal_files:
+ upload_deal_data(f, common.conn)
+ #then load tranche data
+ for f in cusip_files:
+ upload_cusip_data(f, common.conn)
+ common.conn.close()