1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
|
import psycopg2
import os
import datetime
from datetime import date
import csv
import pdb
import sys
from common import root, sanitize_float
from db import conn, query_db, with_connection
def convertToNone(s):
    """Map the feed's placeholder markers ("", "-", "NR") to None.

    Any other value — including None itself — is passed through unchanged,
    so the function is safe to apply to every field of a CSV row.
    """
    if s in ("", "-", "NR"):
        return None
    return s
@with_connection
def upload_cusip_data(conn, filename):
    """Load per-CUSIP tranche rows from a tab-delimited export into cusip_universe.

    For each input row whose deal has a newer "Latest Update" in clo_universe
    than the CUSIP's stored updatedate (or no stored row at all), insert a
    fresh cusip_universe row.  Afterwards, recompute subordination/thickness
    for every deal that received new rows via et_deal_subordination().

    conn     -- database connection injected by the @with_connection decorator
    filename -- path to the tab-separated tranche-info ("TrInfo") file
    """
    # dealname -> most recent "Latest Update" row from clo_universe (cached
    # so each deal is queried at most once).
    dealupdate = {}
    with open(filename, "r") as fh:
        dr = csv.DictReader(fh, dialect='excel-tab')
        deals_to_update = []  # deals that actually received new CUSIP rows
        for line in dr:
            if "ISIN" not in line:
                line['ISIN'] = None
            # "Tranche" holds "dealname,tranche"; rows without a comma are
            # not tranche rows and are skipped entirely.
            if "," in line['Tranche']:
                line["dealname"], line["tranche"] = line["Tranche"].split(",")
            else:
                continue
            line["dealname"] = line["dealname"].lower()
            dealname = line['dealname']
            # Normalize placeholder markers ("", "-", "NR") to NULL.
            line = {k: convertToNone(v) for k, v in line.items()}
            if dealname not in dealupdate:
                dealupdate[dealname] = query_db(
                    "SELECT \"Latest Update\" FROM clo_universe "
                    "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
                    params=(dealname,))[0]
            curr_date = query_db(
                "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s",
                params=(line['CUSIP'],))
            # Insert only when the CUSIP is new or stale relative to its deal.
            if not curr_date or curr_date[0] < dealupdate[dealname]:
                if dealname not in deals_to_update:
                    deals_to_update.append(dealname)
                line['updatedate'] = dealupdate[dealname]
                try:
                    for key in ['Curr Balance', 'Orig Balance', 'Orig Attachment Point',
                                'Curr Attachment Point (def at MV)', 'Orig Detachment Point',
                                'Curr Detachment Point (def at MV)', 'Factor', 'Coupon',
                                'Floater Spread']:
                        if line[key]:
                            line[key] = sanitize_float(line[key])
                            line[key] = convertToNone(line[key])
                except ValueError:
                    # Unparseable numeric field: skip the whole row.
                    continue
                line['Curr Attachment Point'] = line['Curr Attachment Point (def at MV)']
                line['Curr Detachment Point'] = line['Curr Detachment Point (def at MV)']
                if "Curr Moody" not in line:
                    line['Curr Moody'] = line['Orig Moody']
                sqlstring = "INSERT INTO cusip_universe(Cusip, ISIN, \"Bloomberg Ticker\", dealname, tranche, " \
                            "Coupon, Orig_Balance, Curr_Balance, Factor, Orig_Moody, Curr_Moody, " \
                            "Orig_Attach, Orig_Detach, Curr_Attach, Curr_Detach, Floater_Index, " \
                            "Spread, updatedate) " \
                            "VALUES(%(CUSIP)s, %(ISIN)s, %(Bloomberg Ticker)s, %(dealname)s, %(tranche)s, %(Coupon)s, " \
                            "%(Orig Balance)s, %(Curr Balance)s, %(Factor)s, %(Orig Moody)s, %(Curr Moody)s, " \
                            "%(Orig Attachment Point)s, %(Orig Detachment Point)s, "\
                            "%(Curr Attachment Point)s, %(Curr Detachment Point)s, " \
                            "%(Floater Index)s, %(Floater Spread)s, %(updatedate)s)"
                try:
                    with conn.cursor() as c:
                        c.execute(sqlstring, line)
                except psycopg2.DataError:
                    # NOTE(review): interactive debugger left in for bad rows;
                    # consider logging + skipping for unattended runs.
                    pdb.set_trace()
                print("uploaded: {0}".format(line['CUSIP']))
        conn.commit()
        # Second pass: refresh subordination/thickness for each touched deal.
        for dealname in deals_to_update:
            data = query_db("SELECT p_cusip, p_curr_subordination, "
                            "p_curr_thickness from et_deal_subordination(%s)",
                            params=(dealname,),
                            one=False)
            # Reorder into executemany parameter tuples:
            # (subordination, thickness, cusip, updatedate).
            data = [(t[1], t[2], t[0], dealupdate[dealname]) for t in data]
            with conn.cursor() as c:
                c.executemany("UPDATE cusip_universe SET subordination = %s, "
                              "thickness = %s WHERE cusip = %s AND "
                              "updatedate = %s", data)
            conn.commit()
@with_connection
def upload_deal_data(conn, filename):
    """Load deal-level rows from a tab-delimited export into clo_universe.

    A row is inserted only when its "Latest Update" date is newer than the
    latest date already stored for that deal (or the deal is not stored yet).

    conn     -- database connection injected by the @with_connection decorator
    filename -- path to the tab-separated deal-info file
    """
    sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
    # dealname -> latest stored update date
    deallist = dict(query_db(sqlstr, one=False))
    with open(filename, "r") as fh:
        dr = csv.DictReader(fh, dialect='excel-tab')
        for line in dr:
            if not line['Deal Name, Tranche Name'] or (line['Deal Name, Tranche Name'] == 'Unknown Security'):
                continue
            # A blank "Latest Update" marks the end of the data section.
            if not line['Latest Update']:
                break
            # Normalize placeholder markers ("", "-", "NR") to None.
            for key in line.keys():
                line[key] = convertToNone(line[key])
            line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
            line["Paid Down"] = None
            # "Latest Update" may read "Paid Down: <date>" — strip the prefix
            # and record the paid-down date separately.
            # BUG FIX: the original called re.sub() but `re` was never
            # imported (NameError at runtime); the pattern is a literal
            # string, so str.replace() is an exact equivalent.  The extra
            # truthiness guard avoids `in None` (convertToNone above can
            # map "-"/"NR" to None).
            if line["Latest Update"] and "Paid Down" in line["Latest Update"]:
                line["Paid Down"] = line["Latest Update"].replace("Paid Down: ", "")
                line["Latest Update"] = line["Paid Down"]
            for field in ["Deal Issue Date", "Deal Termination Date",
                          "Deal Next Pay Date", "Reinv End Date", "Latest Update", "Pay Day"]:
                if line[field]:
                    try:
                        # Feed dates look like "Jan 02, 2015".
                        line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
                    except ValueError:
                        # NOTE(review): interactive debugger left in for
                        # malformed dates; consider logging instead.
                        pdb.set_trace()
            for key in ["Collection Account Principal Balance", "Collection Account Interest Balance",
                        "Orig Deal Bal", "Curr Deal Bal", "Tranche Curr Bal", "Tranche Orig Bal",
                        "CDO Pct of Assets that are Structured Finance Obligations",
                        "CDO Defaulted Security Balance (Reported)"]:
                if line[key]:
                    line[key] = sanitize_float(line[key])
            dealname = line['Deal/Tranche ID']
            line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
            # psycopg2 adapts a Python list to a Postgres array column.
            line['Deal CUSIP List'] = line['Deal CUSIP List'].split(",")
            if dealname not in deallist or line['Latest Update'] > deallist[dealname]:
                sqlstring = \
                    "INSERT INTO clo_universe " \
                    "VALUES (%(Deal/Tranche ID)s, %(Deal Name)s, %(Collateral Manager)s, %(Orig Deal Bal)s," \
                    "%(Curr Deal Bal)s, %(Tranche Orig Bal)s, %(Tranche Curr Bal)s, %(Tranche Factor)s," \
                    "%(Collection Account Principal Balance)s," \
                    "%(Collection Account Interest Balance)s, %(CDOpercent)s, %(defaultedbal)s, " \
                    "%(Coupon)s, %(Deal Issue Date)s," \
                    "%(Deal Termination Date)s, %(Deal Next Pay Date)s, %(Reinv End Date)s, " \
                    "%(Latest Update)s, %(Deal CUSIP List)s, %(Paid Down)s, %(Pay Day)s)"
                try:
                    with conn.cursor() as c:
                        c.execute(sqlstring, line)
                    # Remember the new high-water mark so later rows for the
                    # same deal in this file are compared against it.
                    deallist[dealname] = line['Latest Update']
                except psycopg2.DataError as detail:
                    print(detail)
                    pdb.set_trace()
                except KeyError as detail:
                    print(detail)
                    pdb.set_trace()
        conn.commit()
if __name__ == "__main__":
    # Work date: first CLI argument when given, otherwise today's date in
    # ISO format (matching the directory-name convention below).
    workdate = sys.argv[1] if len(sys.argv) > 1 else str(datetime.date.today())
    indicative_dir = os.path.join(root, "data", "Indicative_" + workdate)
    files = [os.path.join(indicative_dir, name) for name in os.listdir(indicative_dir)]
    # "TrInfo" files carry tranche/CUSIP data; everything else is deal-level.
    cusip_files = [f for f in files if "TrInfo" in f]
    deal_files = [f for f in files if "TrInfo" not in f]
    # Deal-level rows are loaded first so tranche rows can reference them.
    for deal in deal_files:
        upload_deal_data(deal)
    for cusip in cusip_files:
        upload_cusip_data(cusip)
    conn.close()
|