about summary refs log tree commit diff stats
path: root/python/load_intex_collateral.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/load_intex_collateral.py')
-rw-r--r--  python/load_intex_collateral.py  30
1 file changed, 17 insertions, 13 deletions
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
index 60aabbdd..c2cb9c19 100644
--- a/python/load_intex_collateral.py
+++ b/python/load_intex_collateral.py
@@ -5,6 +5,7 @@ import csv
import datetime
import pdb
import common
+from common import query_db
import sys
fields = ['Asset Name', 'Issuer', 'Contributed Balance', 'Maturity Date', \
@@ -33,12 +34,13 @@ def get_latest_dealupdate(workdate):
for line in dr if 'Paid' not in line['Latest Update'] and line['Latest Update']}
return deal_table
-def upload_data(dealnames, workdate, conn, cursor):
+def upload_data(dealnames, workdate, conn):
for dealname in dealnames:
#dealname, updatedate = line.rstrip().split()
# updatedate = datetime.datetime.strptime(updatedate, '%m/%d/%Y')
#dealname = dealname.upper() + ",AD.txt
- with open( os.path.join(common.root, "data", "Collaterals_" + workdate, dealname.upper() + "_AD.txt"), "r", encoding='windows-1252') as fh:
+ with open( os.path.join(common.root, "data", "Collaterals_" + workdate, dealname.upper() + "_AD.txt"),
+ "r", encoding='windows-1252') as fh:
dr = csv.DictReader(fh, dialect = 'excel-tab')
missingfields = set(fields) - set(dr.fieldnames)
if "LoanX ID" in missingfields:
@@ -92,8 +94,8 @@ def upload_data(dealnames, workdate, conn, cursor):
pdb.set_trace()
data.append(r)
- cursor.execute( "select distinct(updatedate) from et_collateral where dealname= %s", (dealname,))
- old_update_dates = [date[0] for date in cursor.fetchall()]
+ sqlstr = "select distinct(updatedate) from et_collateral where dealname= %s"
+ old_update_dates = [date[0] for date in query_db(sqlstr, params=(dealname,), one=False)]
deal_table = get_latest_dealupdate(workdate)
try:
@@ -105,16 +107,18 @@ def upload_data(dealnames, workdate, conn, cursor):
reinsert = False
if updatedate in old_update_dates:
sqlstr = "SELECT count(*) FROM et_collateral where dealname = %s and updatedate= %s"
- cursor.execute(sqlstr, (dealname, updatedate))
- currlen = cursor.fetchone()[0]
+ currlen = query_db(sqlstr, params = (dealname, updatedate))[0]
if currlen != len(data):
print("{0} has {1} rows in the database and current collateral file has {2}".format(dealname, currlen, len(data)))
+ c = conn.cursor()
sqlstr = "DELETE FROM et_collateral where dealname = %s and updatedate = %s"
- cursor.execute(sqlstr, (dealname, updatedate))
+ c.execute(sqlstr, (dealname, updatedate))
conn.commit()
+ c.close()
reinsert = True
if reinsert or not old_update_dates or updatedate not in old_update_dates:
tag = 0
+ c = conn.cursor()
for row in data:
# these next three ifs are to take care of reinvestment asset lines
if not row[0]:
@@ -132,7 +136,7 @@ def upload_data(dealnames, workdate, conn, cursor):
sqlstr = "INSERT INTO ET_COLLATERAL({0}) VALUES({1})".format(",".join(sql_fields),
",".join(["%s"] * len(sql_fields)))
try:
- cursor.execute(sqlstr, (dealname, updatedate) + tuple(row))
+ c.execute(sqlstr, (dealname, updatedate) + tuple(row))
except psycopg2.DataError as detail:
print(detail)
pdb.set_trace()
@@ -143,11 +147,11 @@ def upload_data(dealnames, workdate, conn, cursor):
# make sure the loan name is unique by tagging it
row[0] = row[0] + "_tag_" + str(tag)
tag = tag + 1
- cursor.execute(sqlstr, (dealname, updatedate) + tuple(row))
+ c.execute(sqlstr, (dealname, updatedate) + tuple(row))
except TypeError:
pdb.set_trace()
conn.commit()
-
+ c.close()
if __name__ == "__main__":
if len(sys.argv) > 1:
workdate = sys.argv[1]
@@ -156,8 +160,8 @@ if __name__ == "__main__":
if len(sys.argv) > 2:
dealnames = sys.argv[2:]
else:
- dealnames = [re.sub("_AD.txt", "", d).lower() for d in os.listdir(os.path.join(common.root, "data", "Collaterals_" + workdate))]
- upload_data(dealnames, workdate, common.conn, common.cursor)
- common.cursor.close()
+ dealnames = [re.sub("_AD.txt", "", d).lower() for d in
+ os.listdir(os.path.join(common.root, "data", "Collaterals_" + workdate))]
+ upload_data(dealnames, workdate, common.conn)
common.conn.close()
print("done")