"""Download Markit LoanX CSV feeds (facilities, rec updates, daily marks) and
load them into the ``etdb`` database.

Files are written beneath ``DATA_DIR`` and named by *workdate*; ``payload``
carries the Markit credentials/query parameters sent with each POST.
"""

import csv
import logging
import os

import requests
from psycopg2 import IntegrityError

from . import DATA_DIR, with_connection

logger = logging.getLogger(__name__)

# Markit LoanX CSV endpoints.
_FACILITY_URL = "https://loans.markit.com/loanx/LoanXFacilityUpdates.csv"
_REC_UPDATES_URL = "https://loans.markit.com/loanx/LoanXRecUpdates.csv"
_MARKS_URL = "https://loans.markit.com/loanx/LoanXMarks.csv"
_ONE_FACILITY_URL = "https://loans.markit.com/loanx/LoanXOneFacility.csv"

# Local destination directories under DATA_DIR.
_FACILITY_DIR = "Facility files"


def download_facility(workdate, payload):
    """Fetch the daily facility-updates CSV and save it under DATA_DIR.

    :param workdate: date token embedded in the output filename.
    :param payload: Markit credentials/query parameters for the POST.
    """
    facility_filename = DATA_DIR / _FACILITY_DIR / f"facility_{workdate}.csv"
    r = requests.post(_FACILITY_URL, params=payload)
    with facility_filename.open("wb") as fh:
        fh.write(r.content)


def download_recupdates(workdate, payload):
    """Fetch the daily rec-updates CSV and save it under DATA_DIR.

    :param workdate: date token embedded in the output filename.
    :param payload: Markit credentials/query parameters for the POST.
    """
    r = requests.post(_REC_UPDATES_URL, params=payload)
    facility_rec_update = DATA_DIR / _FACILITY_DIR / f"facility_rec_{workdate}.csv"
    with facility_rec_update.open("wb") as fh:
        fh.write(r.content)


@with_connection("etdb")
def insert_facility(conn, workdate):
    """Load the previously downloaded facility CSV into ``markit_facility``.

    Rows are committed one at a time; an IntegrityError (e.g. duplicate key)
    is logged and rolled back without aborting the rest of the load.

    :param conn: database connection supplied by ``with_connection``.
    :param workdate: date token identifying which downloaded file to load.
    :raises SystemExit: when Markit returned an authentication failure
        instead of CSV data.
    """
    facility_filename = DATA_DIR / _FACILITY_DIR / f"facility_{workdate}.csv"
    sqlstring = "INSERT INTO markit_facility VALUES( {0} )".format(
        ",".join(["%s"] * 13)
    )
    with facility_filename.open("r") as fh:
        reader = csv.reader(fh)
        header = next(reader)
        # Bug fix: the auth-failure message arrives inside the first cell, so
        # test it as a substring (as download_marks does). The old exact-match
        # list membership test could never fire.
        if header and "Authentication failed" in header[0]:
            logger.error("Couldn't authenticate")
            raise SystemExit
        with conn.cursor() as c:
            for line in reader:
                # Empty CSV cells become SQL NULLs.
                newline = tuple(v or None for v in line)
                try:
                    c.execute(sqlstring, newline)
                except IntegrityError as e:
                    logger.error(e)
                    conn.rollback()
                else:
                    conn.commit()


@with_connection("etdb")
def download_marks(conn, workdate, payload):
    """Fetch the daily marks CSV, save it, and insert it into ``markit_prices``.

    :param conn: database connection supplied by ``with_connection``.
    :param workdate: date token embedded in the output filename.
    :param payload: Markit credentials/query parameters for the POST.
    :raises SystemExit: when Markit returned an authentication failure
        instead of CSV data.
    """
    r = requests.post(_MARKS_URL, params=payload)
    marks_filename = DATA_DIR / "markit" / f"markit_data_{workdate}.csv"
    with marks_filename.open("wb") as fh:
        fh.write(r.content)
    sqlstring = "INSERT INTO markit_prices VALUES( {0} )".format(",".join(["%s"] * 5))
    # Consistency: use pathlib .open() like every other function in this module.
    with marks_filename.open("r") as fh:
        reader = csv.DictReader(fh)
        if "Authentication failed" in reader.fieldnames[0]:
            logger.error("Couldn't authenticate")
            raise SystemExit
        with conn.cursor() as c:
            for line in reader:
                # "implied" depth is stored as 0 in the database.
                if line["Depth"] == "implied":
                    line["Depth"] = 0
                c.execute(
                    sqlstring,
                    (
                        line["LoanX ID"],
                        line["Bid"],
                        line["Offer"],
                        line["Depth"],
                        line["Mark Date"],
                    ),
                )
            conn.commit()


@with_connection("etdb")
def update_facility(conn, workdate, payload):
    """Backfill facilities present in ``markit_prices`` but missing from
    ``markit_facility``.

    For each missing LoanXID the single-facility endpoint is queried; the
    rows are accumulated into a diff CSV (header written once) and then
    inserted, tagging each row with *workdate* as ``modified_time``.

    :param conn: database connection supplied by ``with_connection``.
    :param workdate: date token for the diff filename and ``modified_time``.
    :param payload: Markit credentials/query parameters for the POSTs.
    """
    # LoanXIDs we have prices for but no facility record.
    sqlstring = (
        "SELECT loanxid FROM markit_prices EXCEPT SELECT loanxid FROM markit_facility"
    )
    facility_diff_filename = (
        DATA_DIR / _FACILITY_DIR / f"facility_diff_{workdate}.csv"
    )
    with facility_diff_filename.open("wt") as fh:
        header_written = False
        with conn.cursor() as c:
            c.execute(sqlstring)
            for loanxid in c:
                payload.update({"LOANXID": loanxid[0]})
                r = requests.post(_ONE_FACILITY_URL, params=payload)
                header, *rest = r.content.decode().split("\n")
                # Robustness fix: a header-only response (no data row) used to
                # raise IndexError on rest[0]; skip and log instead.
                if not rest:
                    logger.error("No facility row returned for %s", loanxid[0])
                    continue
                if header_written:
                    fh.write(rest[0] + "\n")
                else:
                    fh.write(header + "\n")
                    fh.write(rest[0] + "\n")
                    header_written = True
    sqlstring = (
        "INSERT INTO markit_facility(LoanXID, PMDID, IssuerName, dealname, facility_type,"
        "loanx_facility_type, initial_amount, initial_spread, maturity, industry, modified_time)"
        "VALUES( {0} )".format(",".join(["%s"] * 11))
    )
    try:
        with facility_diff_filename.open("r") as fh:
            reader = csv.reader(fh)
            next(reader)  # StopIteration here means the diff file is empty.
            with conn.cursor() as c:
                for line in reader:
                    newline = [v or None for v in line] + [workdate]
                    newline.pop(9)  # remove the spread to maturity value
                    c.execute(sqlstring, newline)
                conn.commit()
    except StopIteration:
        pass
    # NOTE(review): with_connection presumably manages the connection's
    # lifetime — confirm this explicit close is actually needed here.
    conn.close()