-rw-r--r--  python/common.py              | 26
-rw-r--r--  python/db.py                  | 31
-rw-r--r--  python/facility_download.py   |  8
-rw-r--r--  python/facility_update.py     | 14
-rw-r--r--  python/futures.py             |  4
-rw-r--r--  python/load_bloomberg_data.py | 22
-rw-r--r--  python/load_markit_data.py    | 11
-rw-r--r--  python/markit_download.py     | 29
-rw-r--r--  python/monitor.py             |  7
9 files changed, 80 insertions, 72 deletions
diff --git a/python/common.py b/python/common.py
index 07d44a29..f34e3efc 100644
--- a/python/common.py
+++ b/python/common.py
@@ -1,32 +1,6 @@
 import os
-import psycopg2
 
 if os.name =='nt':
     root = "//WDsentinel/share/CorpCDOs"
 elif os.name == 'posix':
     root = '/home/share/CorpCDOs'
-
-conn = psycopg2.connect(database="ET",
-                        user="et_user",
-                        password="Serenitas1",
-                        host="debian")
-
-def query_db(conn, sqlstr, **kwargs):
-
-    params = kwargs.get('params', None)
-    one = kwargs.get('one', True)
-    try:
-        c = conn.cursor()
-        if params:
-            c.execute(sqlstr, params)
-        else:
-            c.execute(sqlstr)
-        if one:
-            return c.fetchone()
-        else:
-            return c.fetchall()
-        c.close()
-    except psycopg2.Error:
-        c.close()
-        conn.rollback()
-        return None
diff --git a/python/db.py b/python/db.py
new file mode 100644
index 00000000..3abe1800
--- /dev/null
+++ b/python/db.py
@@ -0,0 +1,31 @@
+import psycopg2
+conn = psycopg2.connect(database="ET",
+                        user="et_user",
+                        password="Serenitas1",
+                        host="debian")
+
+def with_connection(f):
+    def with_connection_(*args, **kwargs):
+        # or use a pool, or a factory function...
+        try:
+            rv = f(conn, *args, **kwargs)
+        except Exception as e:
+            conn.rollback()
+            raise
+
+        return rv
+
+    return with_connection_
+
+@with_connection
+def query_db(conn, sqlstr, params=None, one=True):
+    c = conn.cursor()
+    if params:
+        c.execute(sqlstr, params)
+    else:
+        c.execute(sqlstr)
+    if one:
+        return c.fetchone()
+    else:
+        return c.fetchall()
+    c.close()
diff --git a/python/facility_download.py b/python/facility_download.py
index f56dae7c..141f6e4f 100644
--- a/python/facility_download.py
+++ b/python/facility_download.py
@@ -1,7 +1,7 @@
 import requests
import os
-import common
-from common import query_db
+from common import root
+from db import conn, query_db
legal = 'serecap'
username = 'serecapuser'
@@ -26,7 +26,7 @@ flag = False
 sqlstring = "select loanxid from markit_prices2 except (select loanxid from latest_markit_prices2)"
loan_ids = query_db(sqlstring, one=False)
-with open( os.path.join(common.root, "data", "Facility files",
+with open( os.path.join(root, "data", "Facility files",
"facility_test.csv"), "wb") as fh:
for loanxid in loan_ids:
r = requests.get('https://loans.markit.com/loanx/LoanXOneFacility.csv?LEGALENTITY={0}&USERNAME={1}&PASSWORD={2}&LOANXID={3}'.format(legal, username, password, loanxid[0]))
@@ -37,4 +37,4 @@ with open( os.path.join(common.root, "data", "Facility files",
 fh.write(r.content.split('\n')[1] + "\n")
flag = True
-common.conn.close()
+conn.close()
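
The new db.py centralizes the connection and wraps query_db in a with_connection decorator that injects the shared conn and rolls the transaction back on any exception before re-raising it. A minimal usage sketch of how the refactored helpers fit together; the module names match db.py and common.py above, and the query string is the one facility_download.py already runs:

    from common import root          # per-OS share root chosen in common.py
    from db import conn, query_db    # query_db is wrapped by @with_connection

    # callers no longer pass a connection; the decorator supplies the module-level one
    loan_ids = query_db("select loanxid from markit_prices2 except "
                        "(select loanxid from latest_markit_prices2)", one=False)

    # scripts still commit and close the shared connection themselves
    conn.close()
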
diff --git a/python/facility_update.py b/python/facility_update.py
index 95d38064..88179e0b 100644
--- a/python/facility_update.py
+++ b/python/facility_update.py
@@ -2,9 +2,9 @@ import requests
 import os
import os.path
import datetime
-import common
+from common import root
import csv
-import pdb
+from db import conn
import psycopg2
legal = 'serecap'
@@ -17,7 +17,7 @@ def convertToNone(v):
     return v if v else None
date = str(datetime.date.today())
-filename = os.path.join(common.root, "data", "Facility files", "facility_{0}.csv".format(date))
+filename = os.path.join(root, "data", "Facility files", "facility_{0}.csv".format(date))
with open( filename, "wb") as fh:
fh.write(r.content)
@@ -51,11 +51,11 @@ with open( filename, "wb") as fh:
 with open( filename, "r") as fh:
reader = csv.reader(fh)
- reader.next()
- c = common.conn.cursor()
+ next(reader)
+ c = conn.cursor()
sqlstring = "INSERT INTO markit_facility VALUES( {0} )".format( ",".join(["%s"] * 13))
for line in reader:
newline = [convertToNone(v) for v in line]
c.execute(sqlstring, newline)
-common.conn.commit()
-common.conn.close()
+conn.commit()
+conn.close()
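
facility_update.py now follows the same load pattern as the other scripts: skip the CSV header with the Python 3 compatible next(reader), map empty fields to None so they load as SQL NULL, and insert row by row. A rough self-contained sketch of that pattern; the table name and 13-column layout come from the diff, while the input file name is only a placeholder:

    import csv
    from db import conn

    def convertToNone(v):
        return v if v else None          # empty strings become NULL

    sqlstring = "INSERT INTO markit_facility VALUES( {0} )".format(",".join(["%s"] * 13))
    with open("facility_example.csv", "r") as fh:   # hypothetical input file
        reader = csv.reader(fh)
        next(reader)                                # skip the header row
        c = conn.cursor()
        for line in reader:
            c.execute(sqlstring, [convertToNone(v) for v in line])
    conn.commit()
    conn.close()
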
diff --git a/python/futures.py b/python/futures.py
index 98e1cd23..f8cd852d 100644
--- a/python/futures.py
+++ b/python/futures.py
@@ -1,7 +1,7 @@
 import json
import datetime
import requests
-import common
+from common import root
import sys
import os
import pytz
@@ -40,7 +40,7 @@ r = requests.get(uri)
 quotes = json.loads(r.text)
central = pytz.timezone('US/Central')
-with open(os.path.join(common.root, "data", "Yield Curves",
+with open(os.path.join(root, "data", "Yield Curves",
"futures-{0}.csv".format(workdate.date())), "w") as fh:
for q in quotes[u'marketDataInfoAsStringList'][u'message']:
q['tradeDate'] = q['tradeDate'].strip(' ').strip()
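
futures.py builds a US/Central timezone with pytz before writing the quotes under the shared root. A small illustrative sketch of the pytz call it relies on; the timestamp value here is made up:

    import datetime
    import pytz

    central = pytz.timezone('US/Central')
    # localize() attaches the zone to a naive datetime without shifting the clock
    stamp = central.localize(datetime.datetime(2015, 6, 1, 15, 30))
    print(stamp.isoformat())             # e.g. 2015-06-01T15:30:00-05:00
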
diff --git a/python/load_bloomberg_data.py b/python/load_bloomberg_data.py
index a03e609a..a6109509 100644
--- a/python/load_bloomberg_data.py
+++ b/python/load_bloomberg_data.py
@@ -1,9 +1,9 @@
-import psycopg2
import os.path
import datetime
from datetime import date
import csv
-import common
+from common import root
+from db import conn
def convertToNone(s):
return None if (s=='' or s=='NA') else s
@@ -14,17 +14,16 @@ fields_corp = ["Cusip", "Price", "PricingDate", "Issuer", "Maturity", "Coupon",
 fields_mtge = ["Cusip", "Issuer", "Maturity", "Coupon", "CouponType", "Frequency",
"Spread", "Moody", "InitialMoody"]
-root = os.path.join(common.root, "data", "bloomberg")
+root = os.path.join(root, "data", "bloomberg")
for filename in os.listdir(root):
- c = common.cursor()
- c.execute("select cusip, pricingdate from historical_bloomberg_corp(%s)", (date.today(),))
+ with conn.cursor() as c:
+ c.execute("select cusip, pricingdate from historical_bloomberg_corp(%s)", (date.today(),))
corpcusips = dict(c)
- c.close()
with open( os.path.join(root, filename), "r") as fh:
dr = csv.DictReader(fh)
if "datacorp" in filename:
- c = common.conn.cursor()
+ c = conn.cursor()
for line in dr:
if line["LAST_UPDATE_DT"] != 'NA':
line["LAST_UPDATE_DT"] = \
@@ -45,11 +44,11 @@ for filename in os.listdir(root):
 c.execute(sqlstring, tuple(row))
except IndexError:
pdb.set_trace()
- common.conn.commit()
c.close()
+ conn.commit()
elif "datamtge" in filename:
- c = common.cursor()
+ c = conn.cursor()
c.execute("select * from bloomberg_mtge")
mtgecusips = {record[0]: None for record in c}
for line in dr:
@@ -64,7 +63,8 @@ for filename in os.listdir(root):
 "VALUES({1})".format(",".join(fields_mtge),
",".join(["%s"] * len(fields_mtge)))
c.execute(sqlstring, tuple(row))
- common.conn.commit()
+ c.close()
+ conn.commit()
-common.conn.close()
+conn.close()
print("done")
diff --git a/python/load_markit_data.py b/python/load_markit_data.py
index 7c7810a8..519149c4 100644
--- a/python/load_markit_data.py
+++ b/python/load_markit_data.py
@@ -4,7 +4,8 @@ import datetime
 from datetime import date
 import pdb
 import csv
-import common
+from db import conn
+from common import root
 
 def convertToNone(s):
     return None if (s=='' or s=='NA') else s
@@ -14,7 +15,7 @@ fields = ["LoanX ID", "Issuer", "Deal Name", "Facility", "Industry", \
           "Final Maturity", "Initial Spread", "Bid", "Offer", "Depth", \
           "Spread To Maturity"]
 
-c = common.conn.cursor()
+c = conn.cursor()
 c.execute("SELECT MAX(pricingdate) from markit_prices")
 latestdate = c.fetchone()[0]
 c.close()
@@ -45,12 +46,12 @@ for filename in os.listdir(os.path.join(root, "data", "markit")):
             pdb.set_trace()
         data.append(temp)
-    c = common.conn.cursor()
+    c = conn.cursor()
     for row in data:
         sqlstring = "INSERT INTO markit_prices({0}) " \
                     "VALUES({1})".format(",".join(sql_fields), ",".join(["%s"] * len(sql_fields)))
         c.execute(sqlstring, tuple(row) + (date,))
-    common.conn.commit()
+    conn.commit()
     c.close()
 
-common.conn.close()
+conn.close()
 print("done")
diff --git a/python/markit_download.py b/python/markit_download.py
index 07bce90c..604d89da 100644
--- a/python/markit_download.py
+++ b/python/markit_download.py
@@ -1,5 +1,6 @@
 import requests
-import common
+from common import root
+from db import conn
import os
import datetime
import csv
@@ -14,45 +15,45 @@ def convertToNone(v):
     return v if v else None
r = requests.get('https://loans.markit.com/loanx/LoanXMarks.csv?LEGALENTITY={0}&USERNAME={1}&PASSWORD={2}&EOD=Y'.format(legal, username, password))
-marks_filename = os.path.join(common.root, "data", "markit", "markit_data_{0}.csv".format(workdate))
+marks_filename = os.path.join(root, "data", "markit", "markit_data_{0}.csv".format(workdate))
with open(marks_filename, "wb") as fh:
fh.write(r.content)
r = requests.get('https://loans.markit.com/loanx/LoanXFacilityUpdates.csv?LEGALENTITY={0}&USERNAME={1}&PASSWORD={2}'.format(legal, username, password))
-facility_filename = os.path.join(common.root, "data", "Facility files", "facility_{0}.csv".format(workdate))
+facility_filename = os.path.join(root, "data", "Facility files", "facility_{0}.csv".format(workdate))
with open( facility_filename, "wb") as fh:
fh.write(r.content)
sqlstring = "INSERT INTO markit_prices2 VALUES( {0} )".format( ",".join([ "%s" ] * 5))
with open(marks_filename, "r") as fh:
reader = csv.reader(fh)
- reader.next() # we skip the headers
- c = common.conn.cursor()
+ next(reader) # we skip the headers
+ c = conn.cursor()
for line in reader:
if line[4] == "implied":
line[4] = 0
c.execute(sqlstring, (line[0], line[2], line[3], line[4], line[1]))
-common.conn.commit()
+conn.commit()
c.close()
sqlstring = "INSERT INTO markit_facility VALUES( {0} )".format( ",".join(["%s"] * 13))
with open( facility_filename, "r") as fh:
reader = csv.reader(fh)
reader.next() # we skip the headers
- c = common.conn.cursor()
+ c = conn.cursor()
for line in reader:
newline = [convertToNone(v) for v in line]
c.execute(sqlstring, newline)
-common.conn.commit()
+conn.commit()
c.close()
#we update the missing facility loanxids
-c = common.conn.cursor()
+c = conn.cursor()
sqlstring = "SELECT loanxid FROM markit_prices2 EXCEPT SELECT loanxid FROM markit_facility";
c.execute(sqlstring)
-facility_diff_filename = os.path.join(common.root, "data", "Facility files",
+facility_diff_filename = os.path.join(root, "data", "Facility files",
"facility_diff_{0}.csv".format(workdate))
with open( facility_diff_filename, "wb") as fh:
flag = False
@@ -69,16 +70,16 @@ c.close()
 sqlstring = "INSERT INTO markit_facility(LoanXID, PMDID, IssuerName, dealname, facility_type," \
"loanx_facility_type, initial_amount, initial_spread, maturity, industry, modified_time)" \
"VALUES( {0} )".format( ",".join( ["%s"] * 11))
-c = common.conn.cursor()
+c = conn.cursor()
if os.path.getsize(facility_diff_filename):
with open(facility_diff_filename, "r") as fh:
reader = csv.reader(fh)
- reader.next()
+ next(reader)
for line in reader:
newline = [convertToNone(v) for v in line] + [workdate]
newline.pop(9) # remove the spread to maturity value
c.execute(sqlstring, newline)
- common.conn.commit()
+ conn.commit()
c.close()
-common.conn.close()
+conn.close()
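
markit_download.py builds each output path from the shared root and writes the raw Markit response in binary mode. A stripped-down sketch of that download step, with the endpoint and path pattern taken from the diff; legal, username, password and workdate are placeholders standing in for the values the real script defines:

    import os
    import requests
    from common import root

    legal, username, password = "LEGAL", "USER", "PASS"   # placeholder credentials
    workdate = "2015-06-01"                               # placeholder date string

    url = ("https://loans.markit.com/loanx/LoanXMarks.csv"
           "?LEGALENTITY={0}&USERNAME={1}&PASSWORD={2}&EOD=Y").format(legal, username, password)
    r = requests.get(url)

    marks_filename = os.path.join(root, "data", "markit",
                                  "markit_data_{0}.csv".format(workdate))
    with open(marks_filename, "wb") as fh:    # r.content is bytes, so write in binary mode
        fh.write(r.content)
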
diff --git a/python/monitor.py b/python/monitor.py
index 0bcdf37e..51fada8f 100644
--- a/python/monitor.py
+++ b/python/monitor.py
@@ -2,7 +2,8 @@ import os
 import time
 import subprocess
 import datetime
-import common
+from common import root
+from db import conn
 from intex_scenarios import generate_scenarios
 
 Rpath = os.path.join(common.root, "code", "R")
@@ -22,6 +23,6 @@ while True:
     with open(os.path.join(logpath, "build_scenarios.Rout"), "w") as fh:
         subprocess.call(args2, stderr = subprocess.STDOUT, stdout = fh)
     for dealname in dealnames:
-        generate_scenarios(common.conn, workdate, dealname)
+        generate_scenarios(workdate, dealname)
     time.sleep(3)
-common.conn.close()
+conn.close()
