-rw-r--r--   python/intex_scenarios.py        43
-rw-r--r--   python/load_indicative.py        33
-rw-r--r--   python/load_intex_collateral.py  18
3 files changed, 46 insertions, 48 deletions
diff --git a/python/intex_scenarios.py b/python/intex_scenarios.py
index 31c5d445..58516799 100644
--- a/python/intex_scenarios.py
+++ b/python/intex_scenarios.py
@@ -6,8 +6,8 @@
 from csv import reader
 import json
import re
import psycopg2
-import common
-from common import query_db, root
+from common import root
+from db import conn, query_db, with_connection
import sys
import yaml
@@ -25,31 +25,28 @@ pattern9 = re.compile("(?P<a>SEVERITY\[\w+,\d+\]=)mkt\(70\)")
 global_reinvfloatpercentage = 84
global_reinvfixedpercentage = 16
-def dealname_from_cusip(conn, cusips, distinct=False):
- with conn.cursor() as c:
- if distinct:
- sqlstr = "select distinct *"
- else:
- sqlstr = "select * "
- sqlstr += "from dealname_from_cusip({0})".format(",",join(["%s"]*len(cusip)))
- c.execute(sqlstr, params = cusip)
- dealnames = [d[0] for d in c.fetchall()]
+@with_connection
+def dealname_from_cusip(conn, cusips):
+ c = conn.cursor()
+    c.callproc("dealname_from_cusip", cusips)
+ dealnames = [d[0] for d in c.fetchall()]
+ c.close()
return dealnames
-def get_reinv_assets(conn, dealname, workdate):
+def get_reinv_assets(dealname, workdate):
sqlstr = 'select * from et_historicaldealinfo(%s, %s) where ReinvFlag IS TRUE'
- reinvassets = query_db(conn, sqlstr, params = (dealname, workdate), one = False)
+ reinvassets = query_db(sqlstr, params = (dealname, workdate), one = False)
d = {}
for line in reinvassets:
d[line[3]] = line[22]
return d
-def get_recovery(conn, dealname, workdate, defaultrecovery = 50):
+def get_recovery(dealname, workdate, defaultrecovery = 50):
""" compute average price of defaulted assets
"""
sqlstr = "select sum(coalesce(price, %s) * currentbalance)/sum(currentbalance) " + \
"from et_aggdealinfo_historical(%s, %s) where defaultedflag is True"
- recovery = query_db(conn, sqlstr, params = (defaultrecovery, dealname, workdate))
+ recovery = query_db(sqlstr, params = (defaultrecovery, dealname, workdate))
try:
recovery = recovery[0]
@@ -60,27 +57,27 @@ def get_recovery(conn, dealname, workdate, defaultrecovery = 50):
         recovery = defaultrecovery
return float(recovery)
-def get_reinvenddate(conn, dealname):
+def get_reinvenddate(dealname):
sqlstr = 'SELECT \"Reinv End Date\" from latest_clo_universe where dealname=%s'
- reinvenddate = query_db(conn, sqlstr, params = (dealname,))[0]
+ reinvenddate = query_db(sqlstr, params = (dealname,))[0]
if reinvenddate:
reinvenddate = reinvenddate.strftime("%Y%m%d")
return reinvenddate
else:
raise Exception("missing reinvestment end date")
-def generate_scenarios(conn, workdate, dealname):
+def generate_scenarios(workdate, dealname):
prometheus = os.path.join(root, "Scenarios", "prometheus.sss")
n_scenarios = 100
basedir = os.path.join(root, "Scenarios", "Intex curves_" + workdate)
- defaultedprice = get_recovery(conn, dealname, workdate)
+ defaultedprice = get_recovery(dealname, workdate)
replace = "\g<a>{0:.3f}".format(defaultedprice)
with open(os.path.join(basedir, "csv", dealname + ".config")) as fh:
config = yaml.load(fh)
reinvflag = config['reinvflag']
if reinvflag:
- reinvenddate = get_reinvenddate(conn, dealname)
- reinv_assets = get_reinv_assets(conn, dealname, workdate)
+ reinvenddate = get_reinvenddate(dealname)
+ reinv_assets = get_reinv_assets(dealname, workdate)
n_float_assets = len([v for v in reinv_assets.values() if v == 'FLOAT'])
n_fixed_assets = len([v for v in reinv_assets.values() if v == 'FIXED'])
rollingmat = config['rollingmat']
@@ -236,5 +233,5 @@ if __name__ == "__main__":
                                              "Intex curves_" + workdate, "csv"))
if "RData" in d]
for dealname in dealnames:
- generate_scenarios(common.conn, workdate, dealname)
- common.conn.close()
+ generate_scenarios(workdate, dealname)
+ conn.close()
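Note: conn, query_db, and with_connection are imported from a new db module that is not included in this diff. The following is a minimal sketch of the interface the refactored code appears to assume: one shared psycopg2 connection, a query_db helper bound to it, and a decorator that injects the connection as the first argument. Module contents, connection settings, and the exact query_db semantics below are inferred from the call sites, not taken from the real db.py.

    # db.py (hypothetical sketch, inferred from usage; not the actual module)
    import functools
    import psycopg2

    # Assumed connection settings; the real module presumably reads them from config.
    conn = psycopg2.connect(dbname="clo", host="localhost")

    def query_db(sqlstr, params=None, one=True):
        # Run a statement on the shared connection; return the first row,
        # or the full result set when one=False (matches usage in this diff).
        with conn.cursor() as c:
            c.execute(sqlstr, params)
            rows = c.fetchall()
        if one:
            return rows[0] if rows else None
        return rows

    def with_connection(f):
        # Call the wrapped function with the shared connection as its first
        # argument, so callers can drop the explicit conn parameter.
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            return f(conn, *args, **kwargs)
        return wrapper

Under this reading, decorated functions such as dealname_from_cusip and upload_cusip_data keep conn in their signatures but their callers no longer pass it, which matches the changed call sites in intex_scenarios.py and load_indicative.py; load_intex_collateral.py keeps an explicit conn argument and now takes the shared connection from db.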
diff --git a/python/load_indicative.py b/python/load_indicative.py
index 807c73a3..03dad396 100644
--- a/python/load_indicative.py
+++ b/python/load_indicative.py
@@ -6,8 +6,8 @@ from datetime import date
 import csv
 import pdb
 import sys
-import common
-from common import query_db
+from common import root
+from db import conn, query_db, with_connection

 def convertToNone(s):
     return None if s=="" or s=="-" or s=="NR" else s
@@ -22,7 +22,8 @@ def sanitize_float(intex_float):
         intex_float = float(intex_float)
     return intex_float

-def upload_cusip_data(filename, conn):
+@with_connection
+def upload_cusip_data(conn, filename):
     dealupdate = {}
     with open( filename, "r") as fh:
         dr = csv.DictReader(fh, dialect='excel-tab')
@@ -40,12 +41,12 @@ def upload_cusip_data(filename, conn):
            dealname = line['dealname']
            line = {k: convertToNone(v) for k, v in line.items()}
            if dealname not in dealupdate:
-                dealupdate[dealname] = query_db(conn, "SELECT \"Latest Update\" FROM clo_universe " \
+                dealupdate[dealname] = query_db("SELECT \"Latest Update\" FROM clo_universe " \
                        "WHERE dealname = %s ORDER BY \"Latest Update\" DESC",
                        params = (dealname,))[0]

            sqlstring = "SELECT updatedate FROM latest_cusip_universe WHERE cusip = %s"
-            curr_date = query_db(conn, sqlstring, params = (line['CUSIP'],))
+            curr_date = query_db(sqlstring, params = (line['CUSIP'],))
            if not curr_date or curr_date[0] < dealupdate[dealname]:
                if dealname not in deals_to_update:
                    deals_to_update.append(dealname)
@@ -80,7 +81,7 @@ def upload_cusip_data(filename, conn):
     conn.commit()

     for dealname in deals_to_update:
-        data = query_db(conn, "SELECT p_cusip, p_curr_subordination, "\
+        data = query_db("SELECT p_cusip, p_curr_subordination, "\
                "p_curr_thickness from et_deal_subordination(%s)",
                params = (dealname,),
                one = False)
@@ -91,13 +92,13 @@ def upload_cusip_data(filename, conn):
                  "updatedate = %s", data)
     conn.commit()

-def upload_deal_data(filename, conn):
+@with_connection
+def upload_deal_data(conn, filename):
     sqlstr = "select dealname, max(\"Latest Update\") from clo_universe group by dealname"
-    deallist = dict(query_db(conn, sqlstr, one=False))
+    deallist = dict(query_db(sqlstr, one=False))
     with open( filename, "r") as fh:
         dr = csv.DictReader(fh, dialect='excel-tab')
         data = []
-        c = conn.cursor()
         for line in dr:
             if not line ['Deal Name'] or (line['Deal Name'] == 'Unknown Security'):
                 continue
@@ -153,15 +154,15 @@ if __name__=="__main__":
        workdate = sys.argv[1]
    else:
        workdate = str(datetime.date.today())

-    files = [os.path.join(common.root, "data", "Indicative_" + workdate, f) for f in
-             os.listdir(os.path.join(common.root, "data", "Indicative_" + workdate))]
+    files = [os.path.join(root, "data", "Indicative_" + workdate, f) for f in
+             os.listdir(os.path.join(root, "data", "Indicative_" + workdate))]
    cusip_files = [f for f in files if "TrInfo" in f]
    deal_files = [f for f in files if "TrInfo" not in f]
    #first load deal data
-    for f in deal_files:
-        upload_deal_data(f, common.conn)
+    for deal in deal_files:
+        upload_deal_data(deal)
    #then load tranche data
-    for f in cusip_files:
-        upload_cusip_data(f, common.conn)
-    common.conn.close()
+    for cusip in cusip_files:
+        upload_cusip_data(cusip)
+    conn.close()
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
index e78444cd..d6224fa2 100644
--- a/python/load_intex_collateral.py
+++ b/python/load_intex_collateral.py
@@ -4,8 +4,8 @@
 import re
 import csv
 import datetime
 import pdb
-import common
-from common import query_db
+from common import root
+from db import conn, query_db
 import sys
 fields = ['Asset Name', 'Issuer', 'Contributed Balance', 'Maturity Date', \
@@ -26,12 +26,12 @@ def sanitize_float(intex_float):
         intex_float = float(intex_float)
     return intex_float

-def upload_data(dealnames, workdate, conn):
+def upload_data(conn, dealnames, workdate):
     for dealname in dealnames:
        #dealname, updatedate = line.rstrip().split()
        # updatedate = datetime.datetime.strptime(updatedate, '%m/%d/%Y')
        #dealname = dealname.upper() + ",AD.txt
-        with open( os.path.join(common.root, "data", "Collaterals_" + workdate, dealname.upper() + "_AD.txt"),
+        with open( os.path.join(root, "data", "Collaterals_" + workdate, dealname.upper() + "_AD.txt"),
                  "r", encoding='windows-1252') as fh:
            dr = csv.DictReader(fh, dialect = 'excel-tab')
            missingfields = set(fields) - set(dr.fieldnames)
@@ -87,15 +87,15 @@ def upload_data(dealnames, workdate, conn):
            data.append(r)

        sqlstr = "select distinct(updatedate) from et_collateral where dealname= %s"
-        old_update_dates = [date[0] for date in query_db(conn, sqlstr, params=(dealname,), one=False)]
+        old_update_dates = [date[0] for date in query_db(sqlstr, params=(dealname,), one=False)]

        sqlstr = 'select max("Latest Update") from clo_universe where dealname= %s'
-        updatedate = query_db(conn, sqlstr, params=(dealname,))[0]
+        updatedate = query_db(sqlstr, params=(dealname,))[0]

        reinsert = False
        if updatedate in old_update_dates:
            sqlstr = "SELECT count(*) FROM et_collateral where dealname = %s and updatedate= %s"
-            currlen = query_db(conn, sqlstr, params = (dealname, updatedate))[0]
+            currlen = query_db(sqlstr, params = (dealname, updatedate))[0]
            if currlen != len(data):
                print("{0} has {1} rows in the database and current collateral file has {2}".format(dealname, currlen, len(data)))
                with conn.cursor() as c:
@@ -149,6 +149,6 @@ if __name__ == "__main__":
    else:
        dealnames = [re.sub("_AD.txt", "", d).lower() for d in
-                     os.listdir(os.path.join(common.root, "data", "Collaterals_" + workdate))]
-    upload_data(dealnames, workdate, common.conn)
-    common.conn.close()
+                     os.listdir(os.path.join(root, "data", "Collaterals_" + workdate))]
+    upload_data(conn, dealnames, workdate)
+    conn.close()
    print("done")
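For the rewritten dealname_from_cusip in intex_scenarios.py, cursor.callproc is the DB-API call for invoking a stored function; psycopg2 documents it as equivalent to executing SELECT * FROM procname(%s, ...) with one placeholder per parameter, which is what the deleted string-building code assembled by hand. A small usage sketch, assuming the hypothetical db module sketched earlier and made-up CUSIP values:

    from db import conn                    # assumed shared connection, as sketched above

    cusips = ["12345XAB1", "12345XAC9"]    # illustrative values only
    with conn.cursor() as c:
        # psycopg2 runs roughly: SELECT * FROM dealname_from_cusip(%s, %s)
        c.callproc("dealname_from_cusip", cusips)
        dealnames = [row[0] for row in c.fetchall()]
    print(dealnames)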

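Separately, the pattern9 regex and the replace template built in generate_scenarios target Intex severity assumptions of the form SEVERITY[...]=mkt(70), substituting the recovery price computed by get_recovery; the sub call itself sits outside the hunks shown. A standalone illustration of that substitution, with a made-up defaulted price:

    import re

    # Same pattern and template as in intex_scenarios.py, written as raw strings here.
    pattern9 = re.compile(r"(?P<a>SEVERITY\[\w+,\d+\]=)mkt\(70\)")
    defaultedprice = 45.25                      # illustrative recovery price
    replace = r"\g<a>{0:.3f}".format(defaultedprice)

    line = "SEVERITY[LOAN1,3]=mkt(70)"
    print(pattern9.sub(replace, line))          # -> SEVERITY[LOAN1,3]=45.250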