-rw-r--r--  python/http-server.py                  12
-rw-r--r--  python/intex/load_indicative.py        10
-rw-r--r--  python/intex/load_intex_collateral.py  10
3 files changed, 20 insertions(+), 12 deletions(-)
diff --git a/python/http-server.py b/python/http-server.py
index c2058cd0..496a55f6 100644
--- a/python/http-server.py
+++ b/python/http-server.py
@@ -1,8 +1,12 @@
-import logging
from task_server import app
+import logging

-file_handler = logging.FileHandler(filename='/home/share/CorpCDOs/logs/tasks.log')
-file_handler.setLevel(logging.INFO)
-app.logger.addHandler(file_handler)
+fh = logging.FileHandler(filename='/home/share/CorpCDOs/logs/tasks.log')
+fh.setLevel(logging.INFO)
+formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+fh.setFormatter(formatter)
+logger = logging.getLogger('intex')
+logger.addHandler(fh)
+app.logger.addHandler(fh)

app.run(host='0.0.0.0', port = 8000, debug=True)
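The http-server.py change above replaces the bare FileHandler with one that carries a formatter and is attached both to a named 'intex' logger and to the Flask app logger. A minimal standalone sketch of that handler-wiring pattern follows; the log path and the explicit levels are illustrative placeholders, not values taken from the repository.

import logging

# Illustrative sketch of the setup adopted in http-server.py above.
# The path and the explicit levels are placeholders for this example only.
fh = logging.FileHandler(filename='/tmp/tasks.log')
fh.setLevel(logging.INFO)
fh.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

logger = logging.getLogger('intex')
logger.setLevel(logging.INFO)   # not set in the commit itself; added so this demo emits INFO records
logger.addHandler(fh)

# Records logged through 'intex' or any 'intex.*' child logger propagate to this handler.
logging.getLogger('intex.example').info('written to /tmp/tasks.log with the timestamped format')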
diff --git a/python/intex/load_indicative.py b/python/intex/load_indicative.py
index 8fd75b6e..d8d87913 100644
--- a/python/intex/load_indicative.py
+++ b/python/intex/load_indicative.py
@@ -8,6 +8,8 @@ from common import root, sanitize_float
from db import conn
import logging

+logger = logging.getLogger(__name__)
+
def convertToNone(s):
return None if s in ["", "-", "NR"] else s
@@ -75,8 +77,8 @@ def upload_cusip_data(conn, filename):
with conn.cursor() as c:
c.execute(sqlstring, line)
except psycopg2.DataError as e:
- logging.error(e)
- logging.debug("uploaded: {0}".format(line['CUSIP']))
+ logger.error(e)
+ logger.debug("uploaded: {0}".format(line['CUSIP']))
conn.commit()
# for dealname in deals_to_update:
# with conn.cursor() as c:
@@ -146,7 +148,7 @@ def upload_deal_data(conn, filename):
with conn.cursor() as c:
c.execute(sqlstr, line)
except (psycopg2.DataError, KeyError) as detail:
- logging.error(detail)
+ logger.error(detail)
pdb.set_trace()
with conn.cursor() as c:
if line['Paid Down']:
@@ -164,7 +166,7 @@ def upload_deal_data(conn, filename):
c.execute(sqlstring, line)
deallist1[dealname] = [line['Latest Update']]
except (psycopg2.DataError, KeyError) as detail:
- logging.error(detail)
+ logger.error(detail)
pdb.set_trace()
conn.commit()
diff --git a/python/intex/load_intex_collateral.py b/python/intex/load_intex_collateral.py
index 1b0a7c45..2e42ead0 100644
--- a/python/intex/load_intex_collateral.py
+++ b/python/intex/load_intex_collateral.py
@@ -8,6 +8,8 @@ import uuid
from load_indicative import upload_cusip_data, upload_deal_data
import logging

+logger = logging.getLogger(__name__)
+
fields = ['Asset Name', 'Issuer', 'Contributed Balance', 'Asset Maturity Date',
'Asset Subtype', 'Asset Type', 'Gross Coupon', 'Spread', \
'Frequency', 'Next Paydate', 'Second Lien', 'LoanX ID', 'CUSIP',
@@ -36,7 +38,7 @@ def upload_data(conn, dealnames, workdate):
missingfields = set(fields).union({'Gross Margin'}) - set(dr.fieldnames)
if "LoanX ID" in missingfields:
msg = "{0}: LoanX ID column is missing. Probably an error in exporting from intex"
- logging.warning(msg.format(dealname))
+ logger.warning(msg.format(dealname))
data = {}
for line in dr:
for f in missingfields:
@@ -49,7 +51,7 @@ def upload_data(conn, dealnames, workdate):
for key, l in [('LoanX ID', 8), ('CUSIP', 9), ('Asset Subtype', 10)]:
if line[key]:
if len(line[key]) > l:
- logging.warning("dubious {0} found: {1}".format(key, line[key]))
+ logger.warning("dubious {0} found: {1}".format(key, line[key]))
line[key] = line[key][:l]
if 'Reinvest Collat' in line and line['Reinvest Collat'].upper() == 'Y':
@@ -98,7 +100,7 @@ def upload_data(conn, dealnames, workdate):
currlen = c.fetchone()[0]
conn.commit()
if currlen != len(data): #then we delete and just reupload
- logging.warning("{0} has {1} rows in the database " \
+ logger.warning("{0} has {1} rows in the database " \
"and current collateral file has {2}".format(dealname, currlen, len(data)))
with conn.cursor() as c:
sqlstr = "DELETE FROM et_collateral where dealname = %s and updatedate = %s"
@@ -119,7 +121,7 @@ def upload_data(conn, dealnames, workdate):
try:
c.executemany(sqlstr, [(dealname, updatedate, k) + tuple(v) for k, v in data.items()])
except (psycopg2.DataError, psycopg2.IntegrityError, TypeError) as detail:
- logging.error(detail)
+ logger.error(detail)
pdb.set_trace()
conn.commit()
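The pattern repeated in load_indicative.py and load_intex_collateral.py is the switch from module-level logging.error/logging.warning calls, which go through the root logger, to a module-scoped logger obtained with logging.getLogger(__name__), so each record is tagged with the module that produced it. A small comparison sketch (module name and messages invented for illustration):

import logging

# Root-level config with a %(name)s field so the difference is visible.
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# Module-level call, as before this commit: the record is attributed to 'root'.
logging.warning("dubious CUSIP found: example")

# Named logger, as after this commit: the record carries the module's name,
# and its output can be routed or filtered per module via the logger hierarchy.
logger = logging.getLogger('load_indicative')   # stand-in for __name__ in that module
logger.warning("dubious CUSIP found: example")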