aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--python/clo_universe.py87
-rw-r--r--python/cusip_universe.py100
-rw-r--r--python/load_indicative.py164
-rw-r--r--python/load_intex_collateral.py16
4 files changed, 166 insertions, 201 deletions
diff --git a/python/clo_universe.py b/python/clo_universe.py
deleted file mode 100644
index 8c37cd5a..00000000
--- a/python/clo_universe.py
+++ /dev/null
@@ -1,87 +0,0 @@
-import os
-import os.path
-import datetime
-from datetime import date
-import csv
-import pdb
-import re
-import sys
-import common
-from common import query_db
-
-def convertToNone(s):
- return None if s=="-" or s=="" or s == "NR" else s
-
-def sanitize_float(intex_float):
- intex_float = intex_float.replace(",", "")
- if "(" in intex_float:
- intex_float = - float(intex_float[1:-1])
- else:
- intex_float = float(intex_float)
- return intex_float
-
-def upload_data(workdate, conn):
- sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
- deallist = dict(query_db(sqlstr, one=False))
- universe = os.path.join("data", "clo_universe_intex_" + workdate + ".txt")
- with open( os.path.join(common.root, universe), "r") as fh:
- dr = csv.DictReader(fh, dialect='excel-tab')
- data = []
- c = conn.cursor()
- for line in dr:
- if not line ['Deal Name'] or (line['Deal Name'] == 'Unknown Security'):
- continue
- if not line['Latest Update']:
- break
- for key in line.keys():
- line[key] = convertToNone(line[key])
- line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
- line["Paid Down"] = None
- if "Paid Down" in line["Latest Update"]:
- line["Paid Down"] = re.sub("Paid Down: ","", line["Latest Update"])
- line["Latest Update"] = line["Paid Down"]
- for field in ["Deal Issue Date", "Deal Termination Date", \
- "Deal Next Pay Date", "Reinv End Date", "Latest Update"]:
- if line[field]:
- try:
- line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
- except ValueError:
- pdb.set_trace()
- for key in ["Collection Account Principal Balance", "Collection Account Interest Balance",
- "Orig Deal Bal", "Curr Deal Bal", "Tranche Curr Bal", "Tranche Orig Bal",
- "CDO Pct of Assets that are Structured Finance Obligations",
- "CDO Defaulted Security Balance (Reported)"]:
- if line[key]:
- line[key] = sanitize_float(line[key])
- dealname = line['Deal,Tr/CUSIP/ISIN']
- line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
- line['Deal CUSIP List'] = line['Deal CUSIP List'].split(",")
- if dealname not in deallist or line['Latest Update'] > deallist[dealname]:
- sqlstring = \
- "INSERT INTO clo_universe " \
- "VALUES (%(Deal,Tr/CUSIP/ISIN)s, %(Deal Name)s, %(Collateral Manager)s, %(Orig Deal Bal)s," \
- "%(Curr Deal Bal)s, %(Tranche Orig Bal)s, %(Tranche Curr Bal)s, %(Tranche Factor)s," \
- "%(Collection Account Principal Balance)s," \
- "%(Collection Account Interest Balance)s, %(CDOpercent)s, %(defaultedbal)s, " \
- "%(Coupon)s, %(Deal Issue Date)s," \
- "%(Deal Termination Date)s, %(Deal Next Pay Date)s," \
- "%(Reinv End Date)s, %(Latest Update)s, %(Deal CUSIP List)s, %(Paid Down)s)"
- try:
- c.execute(sqlstring, line)
- deallist[dealname] = line['Latest Update']
- except psycopg2.DataError as detail:
- print(detail)
- pdb.set_trace()
- except KeyError as detail:
- print(detail)
- pdb.set_trace()
- conn.commit()
-
-if __name__ == "__main__":
- if len(sys.argv) > 1:
- workdate = sys.argv[1]
- else:
- workdate = str(datetime.date.today())
- upload_data(workdate, common.conn)
- common.conn.close()
- print("done")
diff --git a/python/cusip_universe.py b/python/cusip_universe.py
deleted file mode 100644
index 15102f2b..00000000
--- a/python/cusip_universe.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import psycopg2
-import os
-import os.path
-import datetime
-from datetime import date
-import csv
-import pdb
-import sys
-import common
-from common import query_db
-
-def convertToNone(s):
- return None if s=="" else s
-
-def sanitize_float(intex_float):
- if intex_float.count(".") > 1:
- raise ValueError
- intex_float = intex_float.replace(",", "")
- if "(" in intex_float:
- intex_float = - float(intex_float[1:-1])
- else:
- intex_float = float(intex_float)
- return intex_float
-
-def upload_data(workdate, conn):
- dealupdate = {}
- for cusip_universe_file in os.listdir(os.path.join(common.root, "data", "Trinfo_" + workdate)):
- with open( os.path.join(common.root, "data", "Trinfo_" + workdate, cusip_universe_file), "r") as fh:
- dr = csv.DictReader(fh, dialect='excel-tab')
- data = []
- deals_to_update = []
- c = conn.cursor()
- for line in dr:
- if "ISIN" not in line:
- line['ISIN'] = None
- if "," in line['Tranche']:
- line["dealname"], line["tranche"] = line["Tranche"].split(",")
- else:
- continue
- line["dealname"] = line["dealname"].lower()
- dealname = line['dealname']
- line = {k: convertToNone(v) for k, v in line.items()}
- if dealname not in dealupdate:
- dealupdate[dealname] = query_db("SELECT \"Latest Update\" FROM clo_universe " \
- "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
- params = (dealname,))[0]
-
- sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
- curr_date = query_db(sqlstring, params = (line['CUSIP'],))
- if not curr_date or curr_date[0] < dealupdate[dealname]:
- if dealname not in deals_to_update:
- deals_to_update.append(dealname)
- line['updatedate'] = dealupdate[dealname]
- try:
- for key in ['Curr Balance', 'Orig Balance', 'Orig Attachment Point',
- 'Curr Attachment Point', 'Orig Detachment Point',
- 'Curr Detachment Point', 'Factor', 'Coupon', 'Floater Spread']:
- if line[key]:
- line[key] = sanitize_float(line[key])
- line[key] = convertToNone(line[key])
- except ValueError:
- continue
-
- if "Curr Moody" not in line:
- line['Curr Moody'] = line['Orig Moody']
- sqlstring = "INSERT INTO cusip_universe(Cusip, ISIN, dealname, tranche, " \
- "Coupon, Orig_Balance, Curr_Balance, Factor, Orig_Moody, Curr_Moody, " \
- "Orig_Attach, Orig_Detach, Curr_Attach, Curr_Detach, Floater_Index, " \
- "Spread, updatedate)" \
- "VALUES (%(CUSIP)s, %(ISIN)s, %(dealname)s, %(tranche)s, %(Coupon)s, " \
- "%(Orig Balance)s, %(Curr Balance)s, %(Factor)s, %(Orig Moody)s, %(Curr Moody)s, " \
- "%(Orig Attachment Point)s, %(Orig Detachment Point)s, %(Curr Attachment Point)s," \
- "%(Curr Detachment Point)s, %(Floater Index)s, %(Floater Spread)s, " \
- "%(updatedate)s)"
- try:
- c.execute(sqlstring, line)
- except psycopg2.DataError:
- pdb.set_trace()
- print("uploaded: {0}".format(line['CUSIP']))
- conn.commit()
-
- for dealname in deals_to_update:
- data = query_db("SELECT p_cusip, p_curr_subordination, "\
- "p_curr_thickness from et_deal_subordination(%s)",
- params = (dealname,),
- one = False)
- data = [ (t[1], t[2], t[0], dealupdate[dealname]) for t in data]
- c.executemany("UPDATE cusip_universe SET subordination = %s, "
- "thickness = %s WHERE cusip = %s AND "
- "updatedate = %s", data)
- conn.commit()
-
-if __name__=="__main__":
- if len(sys.argv) > 1:
- workdate = sys.argv[1]
- else:
- workdate = str(datetime.date.today())
-
- upload_data(workdate, common.conn)
- common.conn.close()
diff --git a/python/load_indicative.py b/python/load_indicative.py
new file mode 100644
index 00000000..2ec69385
--- /dev/null
+++ b/python/load_indicative.py
@@ -0,0 +1,164 @@
+import psycopg2
+import os
+import os.path
+import datetime
+from datetime import date
+import csv
+import pdb
+import sys
+import common
+from common import query_db
+
def convertToNone(s):
    """Map Intex placeholder strings ("", "-", "NR") to None; pass anything else through."""
    return s if s not in ("", "-", "NR") else None
+
def sanitize_float(intex_float):
    """Parse an Intex-formatted number string into a float.

    Strips thousands separators and treats parenthesized values as
    negatives (accounting notation), e.g. "(1,234.5)" -> -1234.5.

    Raises ValueError on malformed input (more than one decimal point,
    or anything float() itself rejects).
    """
    if intex_float.count(".") > 1:
        raise ValueError
    cleaned = intex_float.replace(",", "")
    if "(" in cleaned:
        # "(123.4)" means -123.4; drop the surrounding parentheses.
        return -float(cleaned[1:-1])
    return float(cleaned)
+
def upload_cusip_data(filename, conn):
    """Load one tab-separated Intex tranche ("TrInfo") file into cusip_universe.

    A row is inserted only when the deal's latest update in clo_universe is
    newer than the cusip's recorded updatedate (or the cusip is unknown).
    Afterwards subordination/thickness are refreshed for every deal that
    received new rows.

    filename -- path to the tab-delimited TrInfo extract, one row per tranche
    conn     -- open psycopg2 connection; committed here, never closed
    """
    # dealname -> latest clo_universe "Latest Update", cached across rows.
    dealupdate = {}
    with open( filename, "r") as fh:
        dr = csv.DictReader(fh, dialect='excel-tab')
        data = []
        deals_to_update = []
        c = conn.cursor()
        for line in dr:
            # Some extracts lack an ISIN column entirely.
            if "ISIN" not in line:
                line['ISIN'] = None
            # "Tranche" holds "dealname,tranche"; rows without a comma are
            # not tranche rows and are skipped.
            if "," in line['Tranche']:
                line["dealname"], line["tranche"] = line["Tranche"].split(",")
            else:
                continue
            line["dealname"] = line["dealname"].lower()
            dealname = line['dealname']
            line = {k: convertToNone(v) for k, v in line.items()}
            if dealname not in dealupdate:
                dealupdate[dealname] = query_db("SELECT \"Latest Update\" FROM clo_universe " \
                                                "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
                                                params = (dealname,))[0]

            sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
            curr_date = query_db(sqlstring, params = (line['CUSIP'],))
            # Insert only if the cusip is unknown, or the deal has been
            # updated since the cusip was last loaded.
            if not curr_date or curr_date[0] < dealupdate[dealname]:
                if dealname not in deals_to_update:
                    deals_to_update.append(dealname)
                line['updatedate'] = dealupdate[dealname]
                try:
                    # Numeric columns arrive as Intex-formatted strings.
                    for key in ['Curr Balance', 'Orig Balance', 'Orig Attachment Point',
                                'Curr Attachment Point', 'Orig Detachment Point',
                                'Curr Detachment Point', 'Factor', 'Coupon', 'Floater Spread']:
                        if line[key]:
                            line[key] = sanitize_float(line[key])
                        line[key] = convertToNone(line[key])
                except ValueError:
                    # Unparsable number: skip the whole row.
                    continue

                if "Curr Moody" not in line:
                    line['Curr Moody'] = line['Orig Moody']
                sqlstring = "INSERT INTO cusip_universe(Cusip, ISIN, dealname, tranche, " \
                            "Coupon, Orig_Balance, Curr_Balance, Factor, Orig_Moody, Curr_Moody, " \
                            "Orig_Attach, Orig_Detach, Curr_Attach, Curr_Detach, Floater_Index, " \
                            "Spread, updatedate)" \
                            "VALUES (%(CUSIP)s, %(ISIN)s, %(dealname)s, %(tranche)s, %(Coupon)s, " \
                            "%(Orig Balance)s, %(Curr Balance)s, %(Factor)s, %(Orig Moody)s, %(Curr Moody)s, " \
                            "%(Orig Attachment Point)s, %(Orig Detachment Point)s, %(Curr Attachment Point)s," \
                            "%(Curr Detachment Point)s, %(Floater Index)s, %(Floater Spread)s, " \
                            "%(updatedate)s)"
                try:
                    c.execute(sqlstring, line)
                except psycopg2.DataError:
                    pdb.set_trace()  # bad value slipped past sanitizing: inspect interactively
                print("uploaded: {0}".format(line['CUSIP']))
        conn.commit()

    for dealname in deals_to_update:
        # Recompute per-cusip subordination/thickness via the DB-side function.
        data = query_db("SELECT p_cusip, p_curr_subordination, "\
                        "p_curr_thickness from et_deal_subordination(%s)",
                        params = (dealname,),
                        one = False)
        # Reorder into (subordination, thickness, cusip, updatedate) for the UPDATE.
        data = [ (t[1], t[2], t[0], dealupdate[dealname]) for t in data]
        c.executemany("UPDATE cusip_universe SET subordination = %s, "
                      "thickness = %s WHERE cusip = %s AND "
                      "updatedate = %s", data)
        conn.commit()
+
def upload_deal_data(filename, conn):
    """Load one tab-separated Intex deal-level file into clo_universe.

    Inserts a row per deal only when the file's "Latest Update" is newer
    than the latest update already recorded for that deal (or the deal is
    new).  Commits once at the end.

    filename -- path to the tab-delimited deal extract
    conn     -- open psycopg2 connection; committed here, never closed
    """
    # Existing deals and their most recent "Latest Update" dates.
    sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
    deallist = dict(query_db(sqlstr, one=False))
    with open( filename, "r") as fh:
        dr = csv.DictReader(fh, dialect='excel-tab')
        c = conn.cursor()
        for line in dr:
            # Skip placeholder rows; a missing "Latest Update" marks the end
            # of the useful data in the extract, so stop entirely.
            if not line['Deal Name'] or (line['Deal Name'] == 'Unknown Security'):
                continue
            if not line['Latest Update']:
                break
            for key in line.keys():
                line[key] = convertToNone(line[key])
            line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
            line["Paid Down"] = None
            # Paid-down deals report "Paid Down: <date>" instead of a plain date.
            # FIX: this module never imports `re`, so the previous
            # re.sub("Paid Down: ", "", ...) raised NameError at runtime;
            # the pattern is a literal string, so str.replace is equivalent.
            if "Paid Down" in line["Latest Update"]:
                line["Paid Down"] = line["Latest Update"].replace("Paid Down: ", "")
                line["Latest Update"] = line["Paid Down"]
            # Date columns come as e.g. "Jan 15, 2013".
            for field in ["Deal Issue Date", "Deal Termination Date", \
                          "Deal Next Pay Date", "Reinv End Date", "Latest Update"]:
                if line[field]:
                    try:
                        line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
                    except ValueError:
                        pdb.set_trace()  # unexpected date format: inspect interactively
            # Numeric columns arrive as Intex-formatted strings.
            for key in ["Collection Account Principal Balance", "Collection Account Interest Balance",
                        "Orig Deal Bal", "Curr Deal Bal", "Tranche Curr Bal", "Tranche Orig Bal",
                        "CDO Pct of Assets that are Structured Finance Obligations",
                        "CDO Defaulted Security Balance (Reported)"]:
                if line[key]:
                    line[key] = sanitize_float(line[key])
            dealname = line['Deal,Tr/CUSIP/ISIN']
            line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
            line['Deal CUSIP List'] = line['Deal CUSIP List'].split(",")
            # Only insert when this row is newer than what the DB already has.
            if dealname not in deallist or line['Latest Update'] > deallist[dealname]:
                sqlstring = \
                    "INSERT INTO clo_universe " \
                    "VALUES (%(Deal,Tr/CUSIP/ISIN)s, %(Deal Name)s, %(Collateral Manager)s, %(Orig Deal Bal)s," \
                    "%(Curr Deal Bal)s, %(Tranche Orig Bal)s, %(Tranche Curr Bal)s, %(Tranche Factor)s," \
                    "%(Collection Account Principal Balance)s," \
                    "%(Collection Account Interest Balance)s, %(CDOpercent)s, %(defaultedbal)s, " \
                    "%(Coupon)s, %(Deal Issue Date)s," \
                    "%(Deal Termination Date)s, %(Deal Next Pay Date)s," \
                    "%(Reinv End Date)s, %(Latest Update)s, %(Deal CUSIP List)s, %(Paid Down)s)"
                try:
                    c.execute(sqlstring, line)
                    # Remember the new date so later rows for the same deal
                    # in this file don't re-insert.
                    deallist[dealname] = line['Latest Update']
                except psycopg2.DataError as detail:
                    print(detail)
                    pdb.set_trace()
                except KeyError as detail:
                    print(detail)
                    pdb.set_trace()
    conn.commit()
+
if __name__ == "__main__":
    # Date of the Intex extract: first CLI arg, else today's ISO date.
    workdate = sys.argv[1] if len(sys.argv) > 1 else str(datetime.date.today())
    indicative_dir = os.path.join(common.root, "data", "Indicative_" + workdate)
    files = [os.path.join(common.root, "data", "Indicative_" + workdate, name)
             for name in os.listdir(indicative_dir)]
    cusip_files = [path for path in files if "TrInfo" in path]
    deal_files = [path for path in files if "TrInfo" not in path]

    # Deal-level data must load first: tranche rows look up each deal's
    # "Latest Update" in clo_universe.
    for path in deal_files:
        upload_deal_data(path, common.conn)
    # Then the tranche ("TrInfo") data.
    for path in cusip_files:
        upload_cusip_data(path, common.conn)
    common.conn.close()
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
index c2cb9c19..a7a0336b 100644
--- a/python/load_intex_collateral.py
+++ b/python/load_intex_collateral.py
@@ -26,14 +26,6 @@ def sanitize_float(intex_float):
intex_float = float(intex_float)
return intex_float
-def get_latest_dealupdate(workdate):
- with open(os.path.join(common.root, "data", "clo_universe_intex_" + workdate + ".txt")) as fh:
- dr = csv.DictReader(fh, dialect='excel-tab')
- deal_table = {line['Deal,Tr/CUSIP/ISIN']:
- datetime.datetime.strptime(line['Latest Update'], '%b %d, %Y').date() \
- for line in dr if 'Paid' not in line['Latest Update'] and line['Latest Update']}
- return deal_table
-
def upload_data(dealnames, workdate, conn):
for dealname in dealnames:
#dealname, updatedate = line.rstrip().split()
@@ -97,12 +89,8 @@ def upload_data(dealnames, workdate, conn):
sqlstr = "select distinct(updatedate) from et_collateral where dealname= %s"
old_update_dates = [date[0] for date in query_db(sqlstr, params=(dealname,), one=False)]
- deal_table = get_latest_dealupdate(workdate)
- try:
- updatedate = deal_table[dealname]
- except KeyError as detail:
- print(detail)
- pdb.set_trace()
+ sqlstr = 'select max("Latest Update") from clo_universe where dealname= %s'
+ updatedate = query_db(sqlstr, params=(dealname,))[0]
reinsert = False
if updatedate in old_update_dates: