Diffstat (limited to 'python/intex/load_indicative.py')
-rw-r--r--  python/intex/load_indicative.py  205
1 file changed, 137 insertions(+), 68 deletions(-)
diff --git a/python/intex/load_indicative.py b/python/intex/load_indicative.py
index 4e6c05c5..53124801 100644
--- a/python/intex/load_indicative.py
+++ b/python/intex/load_indicative.py
@@ -12,28 +12,46 @@ import sys
logger = logging.getLogger(__name__)
+
def convertToNone(s):
return None if s in ["", "-", "NR"] else s
def insert_new_cusip(conn, line):
- if line['Pari-Passu Tranches']:
- line['Pari-Passu Tranches'] = line['Pari-Passu Tranches'].split(",")
- to_insert = (line['CUSIP'], line['ISIN'], line['Bloomberg Ticker'],
- line['dealname'], line['tranche'], line['Pari-Passu Tranches'])
- for key in ['Orig Balance', 'Orig Attachment Point', 'Orig Detachment Point',
- 'Floater Spread/Margin']:
+ if line["Pari-Passu Tranches"]:
+ line["Pari-Passu Tranches"] = line["Pari-Passu Tranches"].split(",")
+ to_insert = (
+ line["CUSIP"],
+ line["ISIN"],
+ line["Bloomberg Ticker"],
+ line["dealname"],
+ line["tranche"],
+ line["Pari-Passu Tranches"],
+ )
+ for key in [
+ "Orig Balance",
+ "Orig Attachment Point",
+ "Orig Detachment Point",
+ "Floater Spread/Margin",
+ ]:
if line[key]:
line[key] = sanitize_float(line[key])
line[key] = convertToNone(line[key])
- to_insert += (line['Orig Balance'], line.get('Orig Moody'),
- line['Orig Attachment Point'], line['Orig Detachment Point'],
- line['Floater Index'], line['Floater Spread/Margin'],
- line['Type'])
- sqlstr = ("INSERT INTO cusip_ref(Cusip, ISIN, bloomberg_ticker, dealname, "
- "tranche, paripassu_tranches, Orig_Balance, Orig_Moody, Orig_Attach, "
- "Orig_Detach, Floater_Index, Spread, type) VALUES({0}) "
- "RETURNING cusip_id".format(",".join(["%s"] * 13)))
+ to_insert += (
+ line["Orig Balance"],
+ line.get("Orig Moody"),
+ line["Orig Attachment Point"],
+ line["Orig Detachment Point"],
+ line["Floater Index"],
+ line["Floater Spread/Margin"],
+ line["Type"],
+ )
+ sqlstr = (
+ "INSERT INTO cusip_ref(Cusip, ISIN, bloomberg_ticker, dealname, "
+ "tranche, paripassu_tranches, Orig_Balance, Orig_Moody, Orig_Attach, "
+ "Orig_Detach, Floater_Index, Spread, type) VALUES({0}) "
+ "RETURNING cusip_id".format(",".join(["%s"] * 13))
+ )
with conn.cursor() as c:
try:
c.execute(sqlstr, to_insert)
@@ -46,34 +64,37 @@ def insert_new_cusip(conn, line):
def upload_cusip_data(conn, filename):
dealupdate = {}
- with open( filename, "r") as fh:
- dr = csv.DictReader(fh, dialect='excel-tab')
+ with open(filename, "r") as fh:
+ dr = csv.DictReader(fh, dialect="excel-tab")
data = []
deals_to_update = []
for line in dr:
if "ISIN" not in line:
- line['ISIN'] = None
+ line["ISIN"] = None
sp = line["Tranche"].split(",")
if len(sp) == 2:
line["dealname"], line["tranche"] = sp
else:
continue
line["dealname"] = line["dealname"].lower()
- dealname = line['dealname']
+ dealname = line["dealname"]
line = {k: convertToNone(v) for k, v in line.items()}
if dealname not in dealupdate:
with conn.cursor() as c:
- c.execute("SELECT max(\"Latest Update\") FROM clo_universe "
- "WHERE dealname = %s", (dealname,))
+ c.execute(
+ 'SELECT max("Latest Update") FROM clo_universe '
+ "WHERE dealname = %s",
+ (dealname,),
+ )
try:
dealupdate[dealname], = c.fetchone()
except TypeError:
- logging.error(f'deal:{dealname} not in database')
+                    logger.error(f"deal:{dealname} not in database")
continue
sqlstring = "SELECT cusip_id FROM cusip_ref WHERE cusip=%s and dealname=%s"
with conn.cursor() as c:
- c.execute(sqlstring, (line['CUSIP'], dealname))
+ c.execute(sqlstring, (line["CUSIP"], dealname))
r = c.fetchone()
if r is None:
try:
@@ -88,26 +109,41 @@ def upload_cusip_data(conn, filename):
curr_date, = c.fetchone()
if curr_date is None or curr_date < dealupdate[dealname]:
try:
- for key in ['Curr Balance', 'Curr Attachment Point (def at MV)',
- 'Curr Detachment Point (def at MV)', 'Factor', 'Coupon']:
+ for key in [
+ "Curr Balance",
+ "Curr Attachment Point (def at MV)",
+ "Curr Detachment Point (def at MV)",
+ "Factor",
+ "Coupon",
+ ]:
if line[key]:
line[key] = sanitize_float(line[key])
line[key] = convertToNone(line[key])
except ValueError:
continue
- line['Curr Moody'] = line.get('Curr Moody') or line.get('Orig Moody')
- sqlstring = "INSERT INTO cusip_update VALUES({0})".format(",".join(["%s"] * 8))
- to_insert = (cusip_id, line['Curr Balance'], line['Factor'], line['Coupon'],
- line['Curr Moody'], line['Curr Attachment Point (def at MV)'],
- line['Curr Detachment Point (def at MV)'], dealupdate[dealname])
+ line["Curr Moody"] = line.get("Curr Moody") or line.get("Orig Moody")
+ sqlstring = "INSERT INTO cusip_update VALUES({0})".format(
+ ",".join(["%s"] * 8)
+ )
+ to_insert = (
+ cusip_id,
+ line["Curr Balance"],
+ line["Factor"],
+ line["Coupon"],
+ line["Curr Moody"],
+ line["Curr Attachment Point (def at MV)"],
+ line["Curr Detachment Point (def at MV)"],
+ dealupdate[dealname],
+ )
with conn.cursor() as c:
try:
c.execute(sqlstring, to_insert)
except (psycopg2.DataError, psycopg2.IntegrityError) as e:
logger.error(e)
- logger.debug("uploaded: {0}".format(line['CUSIP']))
+ logger.debug("uploaded: {0}".format(line["CUSIP"]))
conn.commit()
+
def upload_deal_data(conn, filename):
sqlstr = 'SELECT dealname, array_agg("Latest Update") FROM clo_universe GROUP BY dealname'
with conn.cursor() as c:
@@ -119,52 +155,76 @@ def upload_deal_data(conn, filename):
deallist2 = set([d for d, in c])
conn.commit()
with open(filename, "r") as fh:
- dr = csv.DictReader(fh, dialect='excel-tab')
+ dr = csv.DictReader(fh, dialect="excel-tab")
data = []
for line in dr:
- if not line['Deal Name, Tranche Name'] or (line['Deal Name, Tranche Name'] == 'Unknown Security'):
+ if not line["Deal Name, Tranche Name"] or (
+ line["Deal Name, Tranche Name"] == "Unknown Security"
+ ):
continue
- if not line['Latest Update']:
+ if not line["Latest Update"]:
continue
for key in line.keys():
line[key] = convertToNone(line[key])
##simpler names
- line["CDOpercent"] = line["CDO Pct of Assets that are Structured Finance Obligations"]
- line['defaultedbal'] = line["CDO Defaulted Security Balance (Reported)"]
+ line["CDOpercent"] = line[
+ "CDO Pct of Assets that are Structured Finance Obligations"
+ ]
+ line["defaultedbal"] = line["CDO Defaulted Security Balance (Reported)"]
line["Paid Down"] = None
if "Paid Down" in line["Latest Update"]:
- line["Paid Down"] = re.sub("Paid Down: ","", line["Latest Update"])
+ line["Paid Down"] = re.sub("Paid Down: ", "", line["Latest Update"])
line["Latest Update"] = line["Paid Down"]
- for field in ["Deal Closing Date", "Deal Termination Date", "Reinv End Date", \
- "Latest Update", "Pay Day", "Deal First Pay Date", "Paid Down",
- "Deal Last Refi Date"]:
+ for field in [
+ "Deal Closing Date",
+ "Deal Termination Date",
+ "Reinv End Date",
+ "Latest Update",
+ "Pay Day",
+ "Deal First Pay Date",
+ "Paid Down",
+ "Deal Last Refi Date",
+ ]:
if line[field]:
try:
- line[field] = datetime.datetime.strptime(line[field], '%b %d, %Y').date()
+ line[field] = datetime.datetime.strptime(
+ line[field], "%b %d, %Y"
+ ).date()
except ValueError:
logger.error("Can't parse date {}".format(line[field]))
pdb.set_trace()
if line["Pay Day"]:
line["Pay Day"] = line["Pay Day"].day
- for key in ["Principal Collection Account", "Interest Collection Account",
- "Curr Deal Bal", "Tranche Curr Bal", "CDOpercent", "defaultedbal",
- "Orig Deal Bal", "Tranche Orig Bal"]:
+ for key in [
+ "Principal Collection Account",
+ "Interest Collection Account",
+ "Curr Deal Bal",
+ "Tranche Curr Bal",
+ "CDOpercent",
+ "defaultedbal",
+ "Orig Deal Bal",
+ "Tranche Orig Bal",
+ ]:
if line[key]:
line[key] = sanitize_float(line[key])
- line['Deal/Tranche ID'] = line['Deal/Tranche ID'].lower()
- dealname = line['Deal/Tranche ID']
+ line["Deal/Tranche ID"] = line["Deal/Tranche ID"].lower()
+ dealname = line["Deal/Tranche ID"]
if dealname not in deallist2:
- sqlstr = "INSERT INTO deal_indicative VALUES( %(Deal/Tranche ID)s, " \
- "%(Deal Name)s, %(Collateral Manager)s, " \
- "%(Deal Closing Date)s, %(Pay Day)s, " \
- "%(Deal First Pay Date)s, %(Paid Down)s)"
+ sqlstr = (
+ "INSERT INTO deal_indicative VALUES( %(Deal/Tranche ID)s, "
+ "%(Deal Name)s, %(Collateral Manager)s, "
+ "%(Deal Closing Date)s, %(Pay Day)s, "
+ "%(Deal First Pay Date)s, %(Paid Down)s)"
+ )
else:
- #we always update paid_down
- sqlstr = 'UPDATE deal_indicative SET paid_down=%(Paid Down)s ' \
- 'WHERE dealname=%(Deal/Tranche ID)s'
+ # we always update paid_down
+ sqlstr = (
+ "UPDATE deal_indicative SET paid_down=%(Paid Down)s "
+ "WHERE dealname=%(Deal/Tranche ID)s"
+ )
- if line['Deal CUSIP List']:
- line['Deal CUSIP List'] = line['Deal CUSIP List'].split(",")
+ if line["Deal CUSIP List"]:
+ line["Deal CUSIP List"] = line["Deal CUSIP List"].split(",")
try:
with conn.cursor() as c:
c.execute(sqlstr, line)
@@ -175,38 +235,47 @@ def upload_deal_data(conn, filename):
# update deallist2
if dealname not in deallist2:
deallist2.add(dealname)
- if dealname not in deallist1 or line['Latest Update'] not in deallist1[dealname]:
- sqlstring = \
- "INSERT INTO clo_universe " \
- "VALUES (%(Deal/Tranche ID)s, %(Curr Deal Bal)s, %(Tranche Curr Bal)s, " \
- "%(Tranche Factor)s, %(Principal Collection Account)s, " \
- "%(Interest Collection Account)s, %(CDOpercent)s, %(defaultedbal)s, " \
- "%(Coupon)s, %(Latest Update)s, %(Deal Last Refi Date)s, " \
- "%(Deal CUSIP List)s, %(Deal Termination Date)s, %(Reinv End Date)s, " \
+ if (
+ dealname not in deallist1
+ or line["Latest Update"] not in deallist1[dealname]
+ ):
+ sqlstring = (
+ "INSERT INTO clo_universe "
+ "VALUES (%(Deal/Tranche ID)s, %(Curr Deal Bal)s, %(Tranche Curr Bal)s, "
+ "%(Tranche Factor)s, %(Principal Collection Account)s, "
+ "%(Interest Collection Account)s, %(CDOpercent)s, %(defaultedbal)s, "
+ "%(Coupon)s, %(Latest Update)s, %(Deal Last Refi Date)s, "
+ "%(Deal CUSIP List)s, %(Deal Termination Date)s, %(Reinv End Date)s, "
"%(Orig Deal Bal)s, %(Tranche Orig Bal)s)"
+ )
try:
with conn.cursor() as c:
c.execute(sqlstring, line)
- deallist1[dealname] = [line['Latest Update']]
+ deallist1[dealname] = [line["Latest Update"]]
except (psycopg2.DataError, KeyError) as detail:
logger.error(detail)
pdb.set_trace()
conn.commit()
+
if __name__ == "__main__":
if len(sys.argv) > 1:
workdate = sys.argv[1]
else:
workdate = str(datetime.date.today())
- files = [os.path.join(os.environ['DATA_DIR'], "Indicative_" + workdate, f) for f in
- os.listdir(os.path.join(os.environ['DATA_DIR'], "Indicative_" + workdate))]
+ files = [
+ os.path.join(os.environ["DATA_DIR"], "Indicative_" + workdate, f)
+ for f in os.listdir(
+ os.path.join(os.environ["DATA_DIR"], "Indicative_" + workdate)
+ )
+ ]
cusip_files = [f for f in files if "TrInfo" in f]
deal_files = [f for f in files if "TrInfo" not in f]
- with closing(dbconn('etdb')) as etdb:
- #first load deal data
+ with closing(dbconn("etdb")) as etdb:
+ # first load deal data
for deal in deal_files:
upload_deal_data(etdb, deal)
- #then load tranche data
+ # then load tranche data
for cusip in cusip_files:
upload_cusip_data(etdb, cusip)
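
The script relies on two psycopg2 parameter-binding styles: positional "%s" placeholders generated to match the tuple length (insert_new_cusip, upload_cusip_data) and named "%(key)s" placeholders bound from the csv.DictReader row itself (upload_deal_data). A minimal, self-contained sketch of both patterns follows; the example_ref table, values, and DSN are hypothetical and not from this repository:

    import psycopg2

    row = ("ABC123XY9", "example-deal", "A1")  # hypothetical cusip, dealname, tranche

    # Positional style: generate one %s per value and let the driver do the
    # escaping, the ",".join(["%s"] * n) pattern used in insert_new_cusip.
    sql_pos = "INSERT INTO example_ref(cusip, dealname, tranche) VALUES({0})".format(
        ",".join(["%s"] * len(row))
    )

    # Named style: %(key)s placeholders are resolved from a mapping, which is
    # why upload_deal_data can pass the csv.DictReader row straight to execute().
    sql_named = (
        "INSERT INTO example_ref(cusip, dealname, tranche) "
        "VALUES(%(CUSIP)s, %(dealname)s, %(tranche)s)"
    )
    params = {"CUSIP": row[0], "dealname": row[1], "tranche": row[2]}

    with psycopg2.connect("dbname=example") as conn:  # hypothetical DSN
        with conn.cursor() as c:
            c.execute(sql_pos, row)
            c.execute(sql_named, params)
        conn.commit()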