path: root/python/load_intex_collateral.py
Diffstat (limited to 'python/load_intex_collateral.py')
-rw-r--r--  python/load_intex_collateral.py  26
1 file changed, 8 insertions(+), 18 deletions(-)
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
index 9065471b..633dfb74 100644
--- a/python/load_intex_collateral.py
+++ b/python/load_intex_collateral.py
@@ -4,7 +4,7 @@ import re
import csv
import datetime
import pdb
-from common import root
+from common import root, sanitize_float
from db import conn, query_db
import sys
import uuid
@@ -20,16 +20,6 @@ fields = ['Asset Name', 'Issuer', 'Contributed Balance', 'Maturity Date', \
def convertToNone(s):
return None if s=='' else s
-def sanitize_float(intex_float):
- try:
- intex_float = intex_float.replace(",", "")
- if "(" in intex_float:
- return - float(intex_float[1:-1])
- else:
- return float(intex_float)
- except (AttributeError, ValueError):
- return intex_float
-
def upload_data(conn, dealnames, workdate):
for dealname in dealnames:
#dealname, updatedate = line.rstrip().split()
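The helper deleted in this hunk is now imported from common instead (see the first hunk). A minimal sketch of the shared version, reconstructed from the body removed here; its exact home inside common.py is an assumption:

    # Assumed to live in common.py; reconstructed from the function removed above.
    # Intex exports numbers with thousands separators and wraps negatives in
    # parentheses, e.g. "(1,234.56)" means -1234.56.
    def sanitize_float(intex_float):
        try:
            intex_float = intex_float.replace(",", "")
            if "(" in intex_float:
                return -float(intex_float[1:-1])
            return float(intex_float)
        except (AttributeError, ValueError):
            # non-string or non-numeric input (None, '', 'No limit') passes
            # through unchanged for the caller to handle
            return intex_float

With this sketch, sanitize_float("(1,234.56)") gives -1234.56, sanitize_float("1,000") gives 1000.0, and sanitize_float("") or sanitize_float(None) comes back unchanged.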
@@ -77,7 +67,7 @@ def upload_data(conn, dealnames, workdate):
line[field] = sanitize_float(line[field])
if line['Market Price'] == 0:
line['Market Price'] = None
- #we store the Libor FLoor in the database, so Life Floor is really Libor Floor
+ #we store the Libor Floor in the database, so Life Floor is really Libor Floor
if line['Life Floor'] == "No limit":
line['Life Floor'] = 0
elif line['Life Floor']:
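For context, the block around the corrected comment normalizes Intex sentinel values before insert. A hedged illustration of the effect on a single row; the field names come from the hunk, the dict literal is invented for the example:

    from common import sanitize_float  # the shared helper sketched above

    # Illustration only: how the sentinel handling behaves for one hypothetical row.
    line = {'Market Price': '0.00', 'Life Floor': 'No limit'}

    line['Market Price'] = sanitize_float(line['Market Price'])
    if line['Market Price'] == 0:
        line['Market Price'] = None    # a zero market price is stored as NULL

    # the database column holds a Libor floor, so "Life Floor" really means the
    # Libor floor; "No limit" is treated as a floor of 0
    if line['Life Floor'] == "No limit":
        line['Life Floor'] = 0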
@@ -98,8 +88,8 @@ def upload_data(conn, dealnames, workdate):
sqlstr = "select distinct(updatedate) from et_collateral where dealname= %s"
old_update_dates = [date[0] for date in query_db(sqlstr, params=(dealname,), one=False)]
- sqlstr = "select max(\"Latest Update\") from clo_universe where dealname= %s"
- updatedate = query_db(sqlstr, params = (dealname,))[0]
+ sqlstr = "select max(\"Latest Update\") from clo_universe where dealname= %s and \"Latest Update\"<=%s"
+ updatedate = query_db(sqlstr, params = (dealname, workdate))[0]
# sanity check if we already have the data
reinsert = False
if updatedate in old_update_dates:
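This hunk bounds the update-date lookup by the workdate being loaded, presumably so a backfill for an earlier business date does not latch onto a "Latest Update" newer than that date. A sketch of the resulting call; query_db and the dealname, workdate and old_update_dates names all come from the surrounding function:

    # Fragment of upload_data as changed above: cap "Latest Update" at workdate.
    sqlstr = ('select max("Latest Update") from clo_universe '
              'where dealname= %s and "Latest Update"<=%s')
    updatedate = query_db(sqlstr, params=(dealname, workdate))[0]

    # upload_data then compares updatedate against the dates already present in
    # et_collateral (old_update_dates) before deciding whether to reinsert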
@@ -145,11 +135,11 @@ if __name__ == "__main__":
cusip_files = [f for f in files if "TrInfo" in f]
deal_files = [f for f in files if "TrInfo" not in f]
#first load deal data
- for deal in deal_files:
- upload_deal_data(deal)
+ for deal_file in deal_files:
+ upload_deal_data(deal_file)
#then load tranche data
- for cusip in cusip_files:
- upload_cusip_data(cusip)
+ for cusip_file in cusip_files:
+ upload_cusip_data(cusip_file)
upload_data(conn, dealnames, workdate)
conn.close()