Diffstat (limited to 'python')
-rw-r--r--  python/clo_universe.py           92
-rw-r--r--  python/cusip_universe.py         74
-rw-r--r--  python/intex_scenarios.py        42
-rw-r--r--  python/list_dealnames.py          6
-rw-r--r--  python/load_bloomberg_data.py    72
-rw-r--r--  python/load_intex_collateral.py 101
-rw-r--r--  python/load_markit_data.py       59
-rw-r--r--  python/markit_download.py         6
-rw-r--r--  python/query_runner.py           12
-rw-r--r--  python/select_clean.py           12
10 files changed, 476 insertions, 0 deletions
diff --git a/python/clo_universe.py b/python/clo_universe.py
new file mode 100644
index 00000000..ae3f0988
--- /dev/null
+++ b/python/clo_universe.py
@@ -0,0 +1,92 @@
+import psycopg2
+import os
+import os.path
+import datetime
+from datetime import date
+import csv
+import pdb
+import re
+
+if os.name=='nt':
+    root = "//WDSENTINEL/share/CorpCDOs/"
+elif os.name=='posix':
+    root = "/home/share/CorpCDOs/"
+
+universe = "data/clo_universe_intex_2012-11-21.txt"
+
+def convertToNone(s):
+    return None if s=="-" or s=="" else s
+
+conn = psycopg2.connect(database="ET",
+                        user="et_user",
+                        password="Serenitas1",
+                        host="192.168.1.108")
+cursor = conn.cursor()
+
+cursor.execute("select dealname, max(\"Latest Update\") from clo_universe group by dealname")
+deallist = dict(cursor.fetchall())
+
+with open( os.path.join(root, universe), "r") as fh:
+    dr = csv.DictReader(fh, dialect='excel-tab')
+    data = []
+    for line in dr:
+        if line['Deal Name'] == 'Unknown Security':
+            continue
+        if line['Latest Update']=='' or line['Latest Update'] is None:
+            break
+        for key in line.keys():
+            line[key] = convertToNone(line[key])
+        line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
+        if line["CDOpercent"] == "NR":
+            line["CDOpercent"] = None
+        line["Paid Down"] = None
+        if "Paid Down" in line["Latest Update"]:
+            line["Paid Down"] = re.sub("Paid Down: ","", line["Latest Update"])
+            line["Latest Update"] = line["Paid Down"]
+        for field in ["Deal Issue Date", "Deal Termination Date", \
+                      "Deal Next Pay Date", "Reinv End Date", "Latest Update"]:
+            if line[field] is not None:
+                try:
+                    line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
+                except ValueError:
+                    pdb.set_trace()
+        for key in ["Collection Account Principal Balance", "Collection Account Interest Balance",
+                    "Orig Deal Bal", "Curr Deal Bal", "Tranche Curr Bal", "Tranche Orig Bal",
+                    "CDO Pct of Assets that are Structured Finance Obligations",
+                    "CDO Defaulted Security Balance (Reported)"]:
+            if line[key]:
+                line[key] = line[key].replace(",", "")
+        dealname = line['Deal,Tr/CUSIP/ISIN']
+        line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
+        if dealname not in deallist or line['Latest Update'] > deallist[dealname]:
+            sqlstring = \
+                "INSERT INTO clo_universe(dealname, \"Deal Name\", Manager, \"Orig Deal Bal\"," \
+                "\"Curr Deal Bal\", \"Orig Collat Bal\", \"Curr Collat Bal\", \"Tranche Factor\"," \
+                "\"Principal Bal\", \"Interest Bal\",\"CDO Percentage\", \"Defaulted Bal\", \"Curr Coupon\"," \
+                "\"Deal Issue Date\", Maturity, \"Deal Next Pay Date\", \"Reinv End Date\"," \
+                "\"Latest Update\", \"Deal Cusip List\", PaidDown)" \
+                "VALUES (%(Deal,Tr/CUSIP/ISIN)s, %(Deal Name)s, %(Collateral Manager)s, %(Orig Deal Bal)s," \
+                "%(Curr Deal Bal)s, %(Tranche Orig Bal)s, %(Tranche Curr Bal)s, %(Tranche Factor)s," \
+                "%(Collection Account Principal Balance)s," \
+                "%(Collection Account Interest Balance)s, %(CDOpercent)s, %(defaultedbal)s, " \
+                "%(Curr Coupon)s, %(Deal Issue Date)s," \
+                "%(Deal Termination Date)s, %(Deal Next Pay Date)s," \
+                "%(Reinv End Date)s, %(Latest Update)s, %(Deal CUSIP List)s, %(Paid Down)s)"
+            try:
+                cursor.execute(sqlstring, line)
+            except psycopg2.DataError:
+                pdb.set_trace()
+
+conn.commit()
+
+# cursor.execute("select dealname,\"Deal Cusip List\" from clo_universe");
+# cursor2 = conn.cursor()
+# for record in cursor:
+#     for cusip in record[1].split(","):
+#         tuple = (record[0], cusip)
+#         cursor2.execute("INSERT INTO dealcusipmapping(dealname, cusip) VALUES(%s, %s)", tuple)
+# cursor2.close()
+# conn.commit()
+
+cursor.close()
+conn.close()
diff --git a/python/cusip_universe.py b/python/cusip_universe.py
new file mode 100644
index 00000000..d88a44e0
--- /dev/null
+++ b/python/cusip_universe.py
@@ -0,0 +1,74 @@
+import psycopg2
+import os
+import os.path
+import datetime
+from datetime import date
+import csv
+import pdb
+import re
+
+if os.name=='nt':
+    root = "//WDSENTINEL/share/CorpCDOs/"
+elif os.name=='posix':
+    root = "/home/share/CorpCDOs/"
+
+def convertToNone(s):
+    return None if s=="" else s
+
+conn = psycopg2.connect(database="ET",
+                        user="et_user",
+                        password="Serenitas1",
+                        host="192.168.1.108")
+cursor = conn.cursor()
+
+prog = re.compile("\((.*)\)")
+
+cursor.execute("delete from cusip_universe")
+count = 0
+for i in xrange(9):
+    cusip_universe_file = "TrInfo{0}.txt".format(i+1)
+    with open( os.path.join(root, "data", cusip_universe_file), "r") as fh:
+        dr = csv.DictReader(fh, dialect='excel-tab')
+        data = []
+        for line in dr:
+            count += 1
+            if "," in line['Tranche']:
+                line["dealname"], line["tranche"] = line["Tranche"].split(",")
+            else:
+                continue
+            line["dealname"] = line["dealname"].lower()
+            for key in ['Curr Balance', 'Orig Balance', 'Orig Attachment Point', 'Curr Attachment Point',
+                        'Orig Detachment Point', 'Curr Detachment Point', 'Factor', 'Coupon']:
+                line[key] = line[key].replace(",", "")
+                if prog.match(line[key]):
+                    try:
+                        line[key] = float(prog.match(line[key]).group(1))
+                    except ValueError:
+                        pdb.set_trace()
+                line[key] = convertToNone(line[key])
+            if "," in str(line['Floater Spread']):
+                line['Floater Spread'] = line['Floater Spread'].split(",")[0]
+            for key in ['Floater Spread', 'Floater Index']:
+                if prog.match(line[key]):
+                    try:
+                        line[key] = float(prog.match(line[key]).group(1))
+                    except ValueError:
+                        pdb.set_trace()
+                line[key] = convertToNone(line[key])
+            sqlstring = "INSERT INTO cusip_universe(cusip, ISIN, dealname, tranche," \
+                "coupon, Orig_Balance, Curr_Balance, Factor, Orig_moody, Curr_moody, " \
+                "Orig_attach, Orig_detach, Curr_attach, Curr_detach, Floater_Index," \
+                "Spread) " \
+                "VALUES (%(CUSIP)s, %(ISIN)s, %(dealname)s, %(tranche)s, %(Coupon)s, " \
+                "%(Orig Balance)s, %(Curr Balance)s, %(Factor)s, %(Orig Moody)s, %(Curr Moody)s, " \
+                "%(Orig Attachment Point)s, %(Orig Detachment Point)s, %(Curr Attachment Point)s," \
+                "%(Curr Detachment Point)s, %(Floater Index)s, %(Floater Spread)s)"
+            try:
+                cursor.execute(sqlstring, line)
+            except psycopg2.DataError as error:
+                pdb.set_trace()
+    print count
+    conn.commit()
+
+cursor.close()
+conn.close()
diff --git a/python/intex_scenarios.py b/python/intex_scenarios.py
new file mode 100644
index 00000000..70bf8226
--- /dev/null
+++ b/python/intex_scenarios.py
@@ -0,0 +1,42 @@
+import os
+import os.path
+import datetime
+from datetime import date
+import pdb
+import csv
+
+if os.name =='nt':
+    root = "//WDsentinel/share/CorpCDOs/Scenarios"
+elif os.name == 'posix':
+    root = '/home/share/CorpCDOs/Scenarios'
+input = os.path.join(root, "prometheus.sss")
+for dealname in ["stonln1", "babs072", "flags5", "cent11", "wasatl", "oceant2", "acacl071", "limes"]:
+    output = os.path.join(root, dealname + ".sss")
+    cdrscenarios = os.path.join(root, dealname + "-cdr.csv")
+    recoveryscenarios = os.path.join(root, dealname + "-recovery.csv")
+    fh2 = open(output, "w")
+    fhcdr = open(cdrscenarios, "r")
+    fhrecovery = open(recoveryscenarios, "r")
+    cdrline = "\t".join(fhcdr.readline().rstrip().split(",")) +"\n"
+    recoveryline = "\t".join(fhrecovery.readline().rstrip().split(",")) + "\n"
+
+    i=1
+    with open(input) as fh:
+        for line in fh:
+            if "LOSS_RATE[DEAL,{0}]".format(i) in line:
+                newcdrline = "LOSS_RATE[DEAL,{0}]=".format(i) + cdrline
+                fh2.write(newcdrline)
+                continue
+            if "LOSS_SEVERITY[DEAL,{0}]".format(i) in line:
+                newrecoveryline = "LOSS_SEVERITY[DEAL,{0}]=".format(i) + recoveryline
+                fh2.write(newrecoveryline)
+                i=i+1
+                cdrline = "\t".join(fhcdr.readline().rstrip().split(",")) + "\n"
+                recoveryline = "\t".join(fhrecovery.readline().rstrip().split(",")) + "\n"
+                continue
+            fh2.write(line)
+    fh2.close()
+    fhrecovery.close()
+    fhcdr.close()
+
+# sed -i -e "s/\(LOSS_NONPERF_SEVERITY\\[DEAL,[0-9]*\\]\)=.*$/\1=mkt(70)/g" stonln1_100.sss
diff --git a/python/list_dealnames.py b/python/list_dealnames.py
new file mode 100644
index 00000000..8d6a09d4
--- /dev/null
+++ b/python/list_dealnames.py
@@ -0,0 +1,6 @@
+#! /usr/bin/python2
+import os
+filenames = os.listdir("/home/guillaume/share/CorpCDOs/Collaterals")
+
+for name in sorted([fn.split(",")[0].lower() for fn in filenames]):
+    print name
diff --git a/python/load_bloomberg_data.py b/python/load_bloomberg_data.py
new file mode 100644
index 00000000..dba8ae1a
--- /dev/null
+++ b/python/load_bloomberg_data.py
@@ -0,0 +1,72 @@
+import psycopg2
+import os.path
+import datetime
+from datetime import date
+import pdb
+import csv
+
+
+if os.name =='nt':
+    root = "//WDsentinel/share/CorpCDOs/data/bloomberg"
+elif os.name == 'posix':
+    root = '/home/share/CorpCDOS/data/bloomberg'
+
+
+def convertToNone(s):
+    return None if (s=='' or s=='NA') else s
+
+conn = psycopg2.connect(database="ET",
+                        user="et_user",
+                        password="Serenitas1",
+                        host="192.168.1.108")
+cursor = conn.cursor()
+
+for filename in os.listdir(root):
+    cursor.execute("select cusip, pricingdate from historical_bloomberg_corp(%s)", (date.today(),))
+    corpcusips = dict(cursor)
+    with open( os.path.join(root, filename), "r") as fh:
+        dr = csv.DictReader(fh)
+        if "datacorp" in filename:
+            for line in dr:
+                if line["LAST_UPDATE_DT"] != 'NA':
+                    line["LAST_UPDATE_DT"] = \
+                        datetime.datetime.strptime(line["LAST_UPDATE_DT"], '%Y-%m-%d').date()
+                else:
+                    line["LAST_UPDATE_DT"] = \
+                        datetime.datetime.strptime(filename.split("_")[2].split(".")[0], '%Y-%m-%d').date()
+                    line["PX_LAST"] = None
+                if line["MATURITY"] != 'NA':
+                    line["MATURITY"] = datetime.datetime.strptime(line["MATURITY"], '%Y-%m-%d')
+                row = [convertToNone(line[field]) for field in dr.fieldnames]
+                # cursor.execute("SELECT max(PricingDate) from bloomberg_corp where Cusip = %s", (line['CUSIP'],))
+                # currentpricingdate = cursor.fetchone()[0]
+                if line['CUSIP'] not in corpcusips or corpcusips[line['CUSIP']]<line['LAST_UPDATE_DT']:
+                    try:
+                        cursor.execute("INSERT INTO bloomberg_corp(Cusip, Price, PricingDate, Issuer,"
+                                       "Maturity, Coupon, CouponType, Frequency, Spread, Libor_floor,"
+                                       "loan_margin, covlite, secondlien, defaulted, Source)"
+                                       "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
+                                       tuple(row))
+                    except IndexError:
+                        pdb.set_trace()
+            conn.commit()
+        elif "datamtge" in filename:
+            cursor.execute("select * from bloomberg_mtge")
+            mtgecusips = {record[0]: None for record in cursor}
+            for line in dr:
+                if line["MATURITY"] != 'NA':
+                    line["MATURITY"] = datetime.datetime.strptime(line["MATURITY"], '%Y-%m-%d').date()
+                row = [convertToNone(line[field]) for field in dr.fieldnames]
+                # sqlstr = "SELECT * from bloomberg_mtge where Cusip=%s"
+                # cursor.execute(sqlstr, (line['CUSIP'],))
+                # found = cursor.fetchone()
+                if line['CUSIP'] not in mtgecusips:
+                    cursor.execute("INSERT INTO bloomberg_mtge(Cusip, Issuer,"
+                                   "Maturity, Coupon, CouponType, Frequency, Spread,"
+                                   "Moody, InitialMoody)"
+                                   "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",
+                                   tuple(row))
+            conn.commit()
+
+cursor.close()
+conn.close()
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
new file mode 100644
index 00000000..1b1a0861
--- /dev/null
+++ b/python/load_intex_collateral.py
@@ -0,0 +1,101 @@
+import psycopg2
+import os.path
+import re
+import datetime
+import csv
+from datetime import date
+import pdb
+
+if os.name=='nt':
+    root = "//WDSENTINEL/share/CorpCDOs/"
+elif os.name=='posix':
+    root = "/home/share/CorpCDOs/"
+
+root = root + "Collaterals_2012-11-20"
+
+fields = ['Asset Name', 'Issuer', 'Current Balance', 'Maturity Date', 'Asset Subtype', \
+          'Asset Type', 'Gross Coupon', 'Spread', 'Frequency', 'Next Paydate', 'Second Lien', \
+          'LoanX ID', 'CUSIP', 'Market Price', 'Market Price Source', 'Price Date', 'Fixed or Float', \
+          'Defaulted Flag', 'Security Sub-Category', 'Structured Finance Security']
+
+def convertToNone(s):
+    return None if s=='' else s
+
+conn = psycopg2.connect(database="ET", user="et_user", password="Serenitas1", host="192.168.1.108")
+cursor = conn.cursor()
+prog = re.compile("\((.*)\)")
+
+for dealname in os.listdir(root):
+    #dealname, updatedate = line.rstrip().split()
+    # updatedate = datetime.datetime.strptime(updatedate, '%m/%d/%Y')
+    #dealname = dealname.upper() + ",AD.txt"
+    with open( os.path.join(root, dealname), "r") as fh:
+        dr = csv.DictReader(fh, dialect = 'excel-tab')
+        dealname , ending = dealname.split(',')
+        dealname = dealname.lower()
+        data = []
+        for line in dr:
+            missingfields = list(set(fields) - set(dr.fieldnames))
+            for field in missingfields:
+                line[field] = None
+            if line['Fixed or Float'] is not None:
+                line['Fixed or Float'] = line['Fixed or Float'].upper()
+            if 'LoanX ID' in dr.fieldnames and len(line['LoanX ID']) >8:
+                print "dubious id found: {0}".format(line['LoanX ID'])
+                line['LoanX ID'] = line['LoanX ID'][:8]
+            try:
+                # make sure the string is utf8 safe
+                line['Issuer']= line['Issuer'].decode('windows-1252')
+                if line['Issuer']=='' or line['Issuer'].lower() == 'dummy':
+                    break
+            except AttributeError as detail:
+                print dealname
+            #convert (x) to -x
+            for field in ['Spread', 'Gross Coupon']:
+                if line[field] and prog.match(line[field]):
+                    line[field] = - float(prog.match(line[field]).group(1))
+            if line['Market Price'] != '' and isinstance(line['Market Price'], str):
+                line['Market Price'] = float(line['Market Price'].replace(',',''))
+            if isinstance(line['Current Balance'], str):
+                line['Current Balance'] = float(line['Current Balance'].replace(',',''))
+            try:
+                r = [convertToNone(line[field]) for field in fields]
+            except KeyError as detail:
+                print detail
+                pdb.set_trace()
+            data.append(r)
+        cursor.execute( "select \"Latest Update\" from clo_universe "
+                        "where dealname='{0}'".format(dealname))
+        try:
+            updatedate = cursor.fetchone()[0]
+        except TypeError:
+            pdb.set_trace()
+        cursor.execute( "select max(updatedate) from et_collateral where dealname='{0}'".format(dealname))
+        updatedate2 = cursor.fetchone()[0]
+        if updatedate2 is None:
+            pdb.set_trace()
+        if updatedate>updatedate2:
+            for row in data:
+                # print dealname, row
+                sqlstr = "INSERT INTO ET_COLLATERAL(dealname, updatedate, name, IssuerName," \
+                    "CurrentBalance, Maturity, AssetSubtype, AssetType, GrossCoupon," \
+                    "Spread, Frequency, NextPaydate, SecondLien, LoanXID, Cusip, IntexPrice," \
+                    "IntexPriceSource, IntexPriceDate, FixedOrFloat, DefaultedFlag, CovLite, isCDO)" \
+                    "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s," \
+                    "%s, %s, %s)"
+                try:
+                    cursor.execute(sqlstr, (dealname, updatedate) + tuple(row))
+                except psycopg2.DataError:
+                    pdb.set_trace()
+                except psycopg2.IntegrityError:
+                    # crazy hack intex unique id is not really unique
+                    if dealname in ["caval1", "octag11", "windriv2"]:
+                        conn.rollback()
+                        row[0] = "Affinion Group - Tranche B Term Loan - 18"
+                        cursor.execute(sqlstr, (dealname, updatedate) + tuple(row))
+                    else:
+                        pdb.set_trace()
+        conn.commit()
+
+cursor.close()
+conn.close()
diff --git a/python/load_markit_data.py b/python/load_markit_data.py
new file mode 100644
index 00000000..a58341ac
--- /dev/null
+++ b/python/load_markit_data.py
@@ -0,0 +1,59 @@
+import psycopg2
+import os
+import os.path
+import datetime
+from datetime import date
+import pdb
+import csv
+
+if os.name =='nt':
+    root = "//WDsentinel/share/CorpCDOs/data/markit"
+elif os.name == 'posix':
+    root = '/home/share/CorpCDOS/data/markit'
+
+def convertToNone(s):
+    return None if (s=='' or s=='NA') else s
+
+conn = psycopg2.connect(database="ET",
+                        user="et_user",
+                        password="Serenitas1",
+                        host="192.168.1.108")
+cursor = conn.cursor()
+
+fields = ["LoanX ID", "Issuer", "Deal Name", "Facility", "Industry", \
+          "Current SP Rating", "Current Moodys Rating", "Initial Amount",\
+          "Final Maturity", "Initial Spread", "Bid", "Offer", "Depth", \
+          "Spread To Maturity"]
+
+latestdate = cursor.execute("SELECT MAX(pricingdate) from markit_prices")
+latestdate = cursor.fetchone()[0]
+
+for filename in os.listdir(root):
+    date = datetime.datetime.strptime(filename.split("_")[2].split(".")[0], '%Y-%m-%d').date()
+    # only update the new data
+    if latestdate is None or date > latestdate:
+        with open( os.path.join(root, filename) ) as fh:
+            dr = csv.DictReader(fh)
+            data = []
+            for line in dr:
+                try:
+                    line['Final Maturity'] = \
+                        datetime.datetime.strptime(line['Final Maturity'], '%d-%b-%y')
+                except ValueError:
+                    if line['Final Maturity'] !='':
+                        print 'Maturity non empty'
+                if line['Depth'] == 'implied':
+                    line['Depth']=0
+                try:
+                    temp = [convertToNone(line[key]) for key in fields]
+                except KeyError:
+                    pdb.set_trace()
+                data.append(temp)
+
+        for row in data:
+            cursor.execute("INSERT INTO markit_prices(LoanXID, Issuer, DealName,"
+                           "Facility, Industry, SP, Moodys, Amount, Maturity,"
+                           "Spread, Bid, Offer, Depth, STM, PricingDate)"
+                           "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
+                           tuple(row) + (date,))
+        conn.commit()
diff --git a/python/markit_download.py b/python/markit_download.py
new file mode 100644
index 00000000..b4145c22
--- /dev/null
+++ b/python/markit_download.py
@@ -0,0 +1,6 @@
+import requests
+
+# r=requests.get('https://loans.markit.com/loanx/LoanXMarks.csv?LEGALENTITY=serecap&USERNAME=serecapuser&PASSWORD=Welcome1&EOD=Y')
+r = requests.get('https://loans.markit.com/loanx/LoanXFacilityUpdates.csv?LEGALENTITY=serecap&USERNAME=serecapuser&PASSWORD=Welcome1')
+for line in r.text.split('\n'):
+    print line
diff --git a/python/query_runner.py b/python/query_runner.py
new file mode 100644
index 00000000..45595f98
--- /dev/null
+++ b/python/query_runner.py
@@ -0,0 +1,12 @@
+import psycopg2
+conn = psycopg2.connect(database="ET", user="guillaume")
+cursor = conn.cursor()
+
+# cursor.execute("select select a.name,a.issuername,a.loanxid,b.bid from et_collateral a left outer join markit_prices b on a.loanxid=b.loanxid where a.dealname='octagon8'")
+cursor.execute("select a.name, COALESCE(b.bid,c.price) as price, a.currentbalance from et_collateral a left outer join markit_prices b on a.loanxid=b.loanxid left outer join bloomberg_prices c on a.cusip=c.cusip where a.dealname='octagon8'")
+
+for line in cursor:
+    # import pdb;pdb.set_trace()
+    print "\t".join(map(str,line))
+cursor.close()
+conn.close()
diff --git a/python/select_clean.py b/python/select_clean.py
new file mode 100644
index 00000000..f08fc0b2
--- /dev/null
+++ b/python/select_clean.py
@@ -0,0 +1,12 @@
+import os
+root = "/home/guillaume/share/CorpCDOs/Collaterals"
+for dealname in os.listdir(root):
+
+    with open( os.path.join(root, dealname), "r") as fh:
+        # import pdb; pdb.set_trace()
+        header = fh.readline().rstrip().split("\t")
+        if "Underlying Deal" in header:
+            print dealname
+        line = fh.readline().split("\t")
+        if line[0] == "Sum/WAVG":
+            print dealname
