-rw-r--r--  python/http-server.py            21
-rw-r--r--  python/load_indicative.py        11
-rw-r--r--  python/load_intex_collateral.py  28
3 files changed, 26 insertions(+), 34 deletions(-)
diff --git a/python/http-server.py b/python/http-server.py
index 5e04b0f7..0125acf2 100644
--- a/python/http-server.py
+++ b/python/http-server.py
@@ -13,7 +13,7 @@ class MyHandler(BaseHTTPRequestHandler):
insert_quotes()
if self.path == "/insert_intex_data":
workdate = str(datetime.date.today())
- intex_data(self.server.conn, workdate, self.server.logger)
+ intex_data(self.server.conn, workdate)
self.send_response(200)
self.end_headers()
@@ -30,33 +30,28 @@ class MyHandler(BaseHTTPRequestHandler):
self.end_headers()
def log_message(self, format, *args):
- self.server.logger.info("%s - - [%s] %s" %
- (self.address_string(),
- self.log_date_time_string(),
- format%args))
+ logging.info("%s - - [%s] %s" %
+ (self.address_string(),
+ self.log_date_time_string(),
+ format%args))
class MyServer(HTTPServer):
- def __init__(self, addr, handler, logger, queue, conn):
+ def __init__(self, addr, handler, queue, conn):
HTTPServer.__init__(self, addr, handler)
- self.logger = logger
self.queue = queue
self.conn = conn
class MyDaemon(Daemon):
def run(self):
server_address = ('',8000)
- logger = logging.getLogger('tasks')
- logger.setLevel('INFO')
- fh = logging.FileHandler("/home/share/CorpCDOs/logs/tasks.log")
- fh.setLevel('INFO')
- logger.addHandler(fh)
+ logging.basicConfig(filename='/home/share/CorpCDOs/logs/tasks.log', level=logging.INFO)
q = redis.Redis(unix_socket_path='/var/run/redis/redis.sock')
self.conn = psycopg2.connect(database="ET",
user="et_user",
password="Serenitas1",
host="debian",
cursor_factory=DictCursor)
- http = MyServer(server_address, MyHandler, logger, q, self.conn)
+ http = MyServer(server_address, MyHandler, q, self.conn)
http.serve_forever()
if __name__=="__main__":
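The pattern this file moves to, sketched in isolation: configure the root logger once at daemon start-up via logging.basicConfig, then call the module-level logging functions from any handler, instead of threading a logger object through constructors. A minimal sketch, where handle_insert_intex_data is a hypothetical stand-in rather than code from this repository:

    import logging
    import datetime

    # One-time root-logger configuration, as MyDaemon.run() now does.
    logging.basicConfig(filename='/home/share/CorpCDOs/logs/tasks.log',
                        level=logging.INFO)

    def handle_insert_intex_data():
        # Any module-level function can now log without holding a logger
        # reference passed down from the server.
        workdate = str(datetime.date.today())
        logging.info("inserting intex data for %s", workdate)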
diff --git a/python/load_indicative.py b/python/load_indicative.py
index 85f02e47..a692c534 100644
--- a/python/load_indicative.py
+++ b/python/load_indicative.py
@@ -7,11 +7,12 @@ import pdb
import sys
from common import root, sanitize_float
from db import conn
+import logging
def convertToNone(s):
return None if s in ["", "-", "NR"] else s
-def upload_cusip_data(conn, filename, logger=None):
+def upload_cusip_data(conn, filename):
dealupdate = {}
with open( filename, "r") as fh:
dr = csv.DictReader(fh, dialect='excel-tab')
@@ -69,8 +70,8 @@ def upload_cusip_data(conn, filename, logger=None):
with conn.cursor() as c:
c.execute(sqlstring, line)
except psycopg2.DataError as e:
- logger.info(e)
- print("uploaded: {0}".format(line['CUSIP']))
+ logging.error(e)
+ logging.debug("uploaded: {0}".format(line['CUSIP']))
conn.commit()
for dealname in deals_to_update:
with conn.cursor() as c:
@@ -136,10 +137,10 @@ def upload_deal_data(conn, filename):
c.execute(sqlstring, line)
deallist[dealname] = line['Latest Update']
except psycopg2.DataError as detail:
- print(detail)
+ logging.error(detail)
pdb.set_trace()
except KeyError as detail:
- print(detail)
+ logging.error(detail)
pdb.set_trace()
conn.commit()
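The same convention drives the hunks above: with the root logger configured by the daemon, load_indicative.py only needs import logging plus a severity per event (ERROR for database failures, DEBUG for per-row progress). A minimal sketch of the error-handling shape in upload_cusip_data, where upload_row is a hypothetical wrapper, not a function in this file:

    import logging
    import psycopg2

    def upload_row(conn, sqlstring, line):
        # On a DataError, record the exception at ERROR and the row's
        # CUSIP at DEBUG, mirroring the hunk above.
        try:
            with conn.cursor() as c:
                c.execute(sqlstring, line)
        except psycopg2.DataError as e:
            logging.error(e)
            logging.debug("uploaded: {0}".format(line['CUSIP']))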
diff --git a/python/load_intex_collateral.py b/python/load_intex_collateral.py
index 02018762..c7c2580f 100644
--- a/python/load_intex_collateral.py
+++ b/python/load_intex_collateral.py
@@ -2,7 +2,8 @@ import psycopg2
import os, csv, datetime
import pdb
from common import root, sanitize_float
-from db import conn, query_db
+from db import conn
+import logging
import sys
import uuid
from load_indicative import upload_cusip_data, upload_deal_data
@@ -22,7 +22,7 @@ def windows1252_encoder(fh):
for line in fh:
yield line.decode('windows-1252').encode('utf-8')
-def upload_data(conn, dealnames, workdate, logger=None):
+def upload_data(conn, dealnames, workdate):
for dealname in dealnames:
#dealname, updatedate = line.rstrip().split()
# updatedate = datetime.datetime.strptime(updatedate, '%m/%d/%Y')
@@ -38,10 +38,7 @@ def upload_data(conn, dealnames, workdate, logger=None):
missingfields = set(fields).union({'Gross Margin'}) - set(dr.fieldnames)
if "LoanX ID" in missingfields:
msg = "{0}: LoanX ID column is missing. Probably an error in exporting from intex".format(dealname)
- if logger:
- logger.info(msg)
- else:
- print(msg)
+ logging.warning(msg)
data = {}
for line in dr:
for f in missingfields:
@@ -57,10 +54,10 @@ def upload_data(conn, dealnames, workdate, logger=None):
else:
#sanity checks for loanxid and cusip
if len(line['LoanX ID']) > 8:
- print("dubious id found: {0}".format(line['LoanX ID']))
+ logging.warning("dubious id found: {0}".format(line['LoanX ID']))
line['LoanX ID'] = line['LoanX ID'][:8]
if len(line['CUSIP']) > 9:
- print("dubious CUSIP found: {0}".format(line['CUSIP']))
+ logging.warning("dubious CUSIP found: {0}".format(line['CUSIP']))
line['CUSIP'] = line['CUSIP'][:9]
if len(line['Asset Subtype'])>10:
line['Asset Subtype'] = line['Asset Subtype'][:9]
@@ -112,7 +109,8 @@ def upload_data(conn, dealnames, workdate, logger=None):
currlen = c.fetchone()[0]
conn.commit()
if currlen != len(data): #then we delete and just reupload
- print("{0} has {1} rows in the database and current collateral file has {2}".format(dealname, currlen, len(data)))
+                    logging.warning("{0} has {1} rows in the database "
+ "and current collateral file has {2}".format(dealname, currlen, len(data)))
with conn.cursor() as c:
sqlstr = "DELETE FROM et_collateral where dealname = %s and updatedate = %s"
c.execute(sqlstr, (dealname, updatedate))
@@ -132,14 +130,11 @@ def upload_data(conn, dealnames, workdate, logger=None):
try:
c.executemany(sqlstr, [(dealname, updatedate, k) + tuple(v) for k, v in data.items()])
except (psycopg2.DataError, TypeError) as detail:
- if logger:
- logger.info(detail)
- else:
- print(detail)
+ logging.error(detail)
pdb.set_trace()
conn.commit()
-def intex_data(conn, workdate, logger=None):
+def intex_data(conn, workdate):
dealnames = [d.replace("_AD.txt", "").lower() for d in
os.listdir(os.path.join(root, "data", "Collaterals_" + workdate))]
basedir = os.path.join(root, "data", "Indicative_" + workdate)
@@ -151,8 +146,8 @@ def intex_data(conn, workdate, logger=None):
upload_deal_data(conn, deal_file)
#then load tranche data
for cusip_file in cusip_files:
- upload_cusip_data(conn, cusip_file, logger)
- upload_data(conn, dealnames, workdate, logger)
+ upload_cusip_data(conn, cusip_file)
+ upload_data(conn, dealnames, workdate)
if __name__ == "__main__":
if len(sys.argv) > 1:
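After this commit the collateral loader threads only conn and workdate; callers no longer construct or pass a logger. A hedged usage sketch of the __main__ path (the argv handling shown is an assumption; the default workdate mirrors the /insert_intex_data handler in http-server.py):

    import datetime
    import sys

    if __name__ == "__main__":
        # Assumed behaviour: an explicit workdate may be given on the
        # command line; otherwise default to today, as the HTTP handler does.
        workdate = sys.argv[1] if len(sys.argv) > 1 else str(datetime.date.today())
        intex_data(conn, workdate)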