Diffstat (limited to 'python/markit')
| -rw-r--r-- | python/markit/loans.py | 278 |
1 file changed, 139 insertions, 139 deletions
diff --git a/python/markit/loans.py b/python/markit/loans.py
index f4fa4a21..bc5f0aaa 100644
--- a/python/markit/loans.py
+++ b/python/markit/loans.py
@@ -1,139 +1,139 @@
-import csv
-import logging
-import os
-import requests
-
-from . import with_connection
-from psycopg2 import IntegrityError
-
-logger = logging.getLogger(__name__)
-
-
-def download_facility(workdate, payload):
-    r = requests.post(
-        "https://loans.markit.com/loanx/LoanXFacilityUpdates.csv", params=payload
-    )
-    facility_filename = os.path.join(
-        os.environ["DATA_DIR"], "Facility files", "facility_{0}.csv".format(workdate)
-    )
-    with open(facility_filename, "wb") as fh:
-        fh.write(r.content)
-
-
-def download_recupdates(workdate, payload):
-    r = requests.post(
-        "https://loans.markit.com/loanx/LoanXRecUpdates.csv", params=payload
-    )
-    facility_rec_update = os.path.join(
-        os.environ["DATA_DIR"],
-        "Facility files",
-        "facility_rec_{0}.csv".format(workdate),
-    )
-    with open(facility_rec_update, "wb") as fh:
-        fh.write(r.content)
-
-
-@with_connection("etdb")
-def insert_facility(conn, workdate):
-    facility_filename = os.path.join(
-        os.environ["DATA_DIR"], "Facility files", "facility_{0}.csv".format(workdate)
-    )
-    sqlstring = "INSERT INTO markit_facility VALUES( {0} )".format(
-        ",".join(["%s"] * 13)
-    )
-    with open(facility_filename, "r") as fh:
-        reader = csv.reader(fh)
-        header = next(reader)
-        if "Authentication failed" in header:
-            logger.error("Couldn't authenticate")
-            raise SystemExit
-        with conn.cursor() as c:
-            for line in reader:
-                newline = tuple([v or None for v in line])
-                try:
-                    c.execute(sqlstring, newline)
-                except IntegrityError as e:
-                    logger.error(e)
-                    conn.rollback()
-                else:
-                    conn.commit()
-
-
-@with_connection("etdb")
-def download_marks(conn, workdate, payload):
-    r = requests.post("https://loans.markit.com/loanx/LoanXMarks.csv", params=payload)
-    marks_filename = os.path.join(
-        os.environ["DATA_DIR"], "markit", "markit_data_{0}.csv".format(workdate)
-    )
-    with open(marks_filename, "wb") as fh:
-        fh.write(r.content)
-    sqlstring = "INSERT INTO markit_prices VALUES( {0} )".format(",".join(["%s"] * 5))
-    with open(marks_filename, "r") as fh:
-        reader = csv.DictReader(fh)
-        if "Authentication failed" in reader.fieldnames[0]:
-            logger.error("Couldn't authenticate")
-            raise SystemExit
-        with conn.cursor() as c:
-            for line in reader:
-                if line["Depth"] == "implied":
-                    line["Depth"] = 0
-                c.execute(
-                    sqlstring,
-                    (
-                        line["LoanX ID"],
-                        line["Bid"],
-                        line["Offer"],
-                        line["Depth"],
-                        line["Mark Date"],
-                    ),
-                )
-            conn.commit()
-
-
-@with_connection("etdb")
-def update_facility(conn, workdate, payload):
-    # we update the missing facility loanxids
-    sqlstring = (
-        "SELECT loanxid FROM markit_prices EXCEPT SELECT loanxid FROM markit_facility"
-    )
-    facility_diff_filename = os.path.join(
-        os.environ["DATA_DIR"],
-        "Facility files",
-        "facility_diff_{0}.csv".format(workdate),
-    )
-    with open(facility_diff_filename, "wt") as fh:
-        flag = False
-        with conn.cursor() as c:
-            c.execute(sqlstring)
-            for loanxid in c:
-                payload.update({"LOANXID": loanxid[0]})
-                r = requests.post(
-                    "https://loans.markit.com/loanx/LoanXOneFacility.csv",
-                    params=payload,
-                )
-                header, *rest = r.content.decode().split("\n")
-                if flag:
-                    fh.write(rest[0] + "\n")
-                else:
-                    fh.write(header + "\n")
-                    fh.write(rest[0] + "\n")
-                    flag = True
-
-    sqlstring = (
-        "INSERT INTO markit_facility(LoanXID, PMDID, IssuerName, dealname, facility_type,"
-        "loanx_facility_type, initial_amount, initial_spread, maturity, industry, modified_time)"
-        "VALUES( {0} )".format(",".join(["%s"] * 11))
-    )
-    try:
-        with open(facility_diff_filename, "r") as fh:
-            reader = csv.reader(fh)
-            next(reader)
-            with conn.cursor() as c:
-                for line in reader:
-                    newline = [v or None for v in line] + [workdate]
-                    newline.pop(9)  # remove the spread to maturity value
-                    c.execute(sqlstring, newline)
-                conn.commit()
-    except StopIteration:
-        pass
-    conn.close()
+import csv
+import logging
+import os
+import requests
+
+from . import with_connection
+from psycopg2 import IntegrityError
+
+logger = logging.getLogger(__name__)
+
+
+def download_facility(workdate, payload):
+    r = requests.post(
+        "https://loans.markit.com/loanx/LoanXFacilityUpdates.csv", params=payload
+    )
+    facility_filename = os.path.join(
+        os.environ["DATA_DIR"], "Facility files", "facility_{0}.csv".format(workdate)
+    )
+    with open(facility_filename, "wb") as fh:
+        fh.write(r.content)
+
+
+def download_recupdates(workdate, payload):
+    r = requests.post(
+        "https://loans.markit.com/loanx/LoanXRecUpdates.csv", params=payload
+    )
+    facility_rec_update = os.path.join(
+        os.environ["DATA_DIR"],
+        "Facility files",
+        "facility_rec_{0}.csv".format(workdate),
+    )
+    with open(facility_rec_update, "wb") as fh:
+        fh.write(r.content)
+
+
+@with_connection("etdb")
+def insert_facility(conn, workdate):
+    facility_filename = os.path.join(
+        os.environ["DATA_DIR"], "Facility files", "facility_{0}.csv".format(workdate)
+    )
+    sqlstring = "INSERT INTO markit_facility VALUES( {0} )".format(
+        ",".join(["%s"] * 13)
+    )
+    with open(facility_filename, "r") as fh:
+        reader = csv.reader(fh)
+        header = next(reader)
+        if "Authentication failed" in header:
+            logger.error("Couldn't authenticate")
+            raise SystemExit
+        with conn.cursor() as c:
+            for line in reader:
+                newline = tuple([v or None for v in line])
+                try:
+                    c.execute(sqlstring, newline)
+                except IntegrityError as e:
+                    logger.error(e)
+                    conn.rollback()
+                else:
+                    conn.commit()
+
+
+@with_connection("etdb")
+def download_marks(conn, workdate, payload):
+    r = requests.post("https://loans.markit.com/loanx/LoanXMarks.csv", params=payload)
+    marks_filename = os.path.join(
+        os.environ["DATA_DIR"], "markit", "markit_data_{0}.csv".format(workdate)
+    )
+    with open(marks_filename, "wb") as fh:
+        fh.write(r.content)
+    sqlstring = "INSERT INTO markit_prices VALUES( {0} )".format(",".join(["%s"] * 5))
+    with open(marks_filename, "r") as fh:
+        reader = csv.DictReader(fh)
+        if "Authentication failed" in reader.fieldnames[0]:
+            logger.error("Couldn't authenticate")
+            raise SystemExit
+        with conn.cursor() as c:
+            for line in reader:
+                if line["Depth"] == "implied":
+                    line["Depth"] = 0
+                c.execute(
+                    sqlstring,
+                    (
+                        line["LoanX ID"],
+                        line["Bid"],
+                        line["Offer"],
+                        line["Depth"],
+                        line["Mark Date"],
+                    ),
+                )
+            conn.commit()
+
+
+@with_connection("etdb")
+def update_facility(conn, workdate, payload):
+    # we update the missing facility loanxids
+    sqlstring = (
+        "SELECT loanxid FROM markit_prices EXCEPT SELECT loanxid FROM markit_facility"
+    )
+    facility_diff_filename = os.path.join(
+        os.environ["DATA_DIR"],
+        "Facility files",
+        "facility_diff_{0}.csv".format(workdate),
+    )
+    with open(facility_diff_filename, "wt") as fh:
+        flag = False
+        with conn.cursor() as c:
+            c.execute(sqlstring)
+            for loanxid in c:
+                payload.update({"LOANXID": loanxid[0]})
+                r = requests.post(
+                    "https://loans.markit.com/loanx/LoanXOneFacility.csv",
+                    params=payload,
+                )
+                header, *rest = r.content.decode().split("\n")
+                if flag:
+                    fh.write(rest[0] + "\n")
+                else:
+                    fh.write(header + "\n")
+                    fh.write(rest[0] + "\n")
+                    flag = True
+
+    sqlstring = (
+        "INSERT INTO markit_facility(LoanXID, PMDID, IssuerName, dealname, facility_type,"
+        "loanx_facility_type, initial_amount, initial_spread, maturity, industry, modified_time)"
+        "VALUES( {0} )".format(",".join(["%s"] * 11))
+    )
+    try:
+        with open(facility_diff_filename, "r") as fh:
+            reader = csv.reader(fh)
+            next(reader)
+            with conn.cursor() as c:
+                for line in reader:
+                    newline = [v or None for v in line] + [workdate]
+                    newline.pop(9)  # remove the spread to maturity value
+                    c.execute(sqlstring, newline)
+                conn.commit()
+    except StopIteration:
+        pass
+    conn.close()
