Diffstat (limited to 'python/markit')
-rw-r--r--   python/markit/__main__.py        4
-rw-r--r--   python/markit/cds.py            11
-rw-r--r--   python/markit/import_quotes.py  79
3 files changed, 54 insertions, 40 deletions
diff --git a/python/markit/__main__.py b/python/markit/__main__.py
index d74194d8..913eba77 100644
--- a/python/markit/__main__.py
+++ b/python/markit/__main__.py
@@ -13,7 +13,7 @@ from .loans import (
     download_recupdates,
 )
 from .rates import downloadMarkitIRData
-from .import_quotes import copy_curves_forward, insert_cds, insert_index, insert_tranche
+from .import_quotes import *
 from pandas.tseries.offsets import BDay
 from sqlalchemy import create_engine
 from utils import SerenitasFileHandler
@@ -82,11 +82,11 @@ elif args.cds:
         "password": "password",
         "version": "5",
         "format": "csv",
-        "report": "FIXED_COUPON",
         "date": f"{workdate:%Y%m%d}",
         "type": "CDS",
     }
     if not args.insert_only:
+        # download_cds_data(payload, workdate, "COMPOSITES")
         download_cds_data(payload, workdate)
         payload.update({"type": "CredIndex", "version": 4})
         download_composite_data(payload, workdate, historical)
diff --git a/python/markit/cds.py b/python/markit/cds.py
index d039b474..da2ca5c5 100644
--- a/python/markit/cds.py
+++ b/python/markit/cds.py
@@ -17,21 +17,24 @@ def convertToNone(v):
     return v if v else None
-def download_cds_data(payload, workdate):
+def download_cds_data(payload, workdate, report="FIXED_COUPON"):
+    payload.update({"report": report})
     r = requests.post("https://www.markit.com/export.jsp", params=payload)
     content = io.BytesIO(r.content)
     save_dir = BASE_DIR / "Tranche_data" / "CDS" / f"{workdate:%Y}"
     if not save_dir.exists():
         save_dir.mkdir()
-    csv_file = save_dir / f"{workdate}_fixed.csv.lz4"
+    suffix = "fixed" if report == "FIXED_COUPON" else "parspread"
+    csv_file = save_dir / f"{workdate}_{suffix}.csv.lz4"
     try:
         with zipfile.ZipFile(content) as z:
             with lz4.frame.open(csv_file, "wb") as f2:
                 for f in z.namelist():
                     if f.endswith("csv"):
                         f1 = z.open(f)
-                        next(f1)
-                        next(f1)
+                        if report == "FIXED_COUPON":
+                            next(f1)
+                            next(f1)
                         shutil.copyfileobj(f1, f2)
                         f1.close()
     except zipfile.BadZipFile:
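
For reference, a minimal usage sketch of the reworked download_cds_data. This is not part of the patch: the "COMPOSITES" report name appears only in the commented-out call in __main__.py, the credentials are placeholders, and the package is assumed to be importable as markit.

# Hypothetical sketch: the default report keeps the old FIXED_COUPON behaviour,
# while another report name lands in a separate "<date>_parspread.csv.lz4" file
# and leaves that report's header rows in place (no next() calls).
import datetime

from markit.cds import download_cds_data  # assumed import path

workdate = datetime.date(2024, 1, 2)
payload = {
    "user": "user",          # placeholder credentials
    "password": "password",
    "version": "5",
    "format": "csv",
    "date": f"{workdate:%Y%m%d}",
    "type": "CDS",
}

download_cds_data(payload, workdate)                # writes 2024-01-02_fixed.csv.lz4
download_cds_data(payload, workdate, "COMPOSITES")  # writes 2024-01-02_parspread.csv.lz4
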
diff --git a/python/markit/import_quotes.py b/python/markit/import_quotes.py
index abcf3831..a8d05a29 100644
--- a/python/markit/import_quotes.py
+++ b/python/markit/import_quotes.py
@@ -17,6 +17,14 @@ from yieldcurve import get_curve
 
 logger = logging.getLogger(__name__)
 
+__all__ = (
+    "copy_curves_forward",
+    "remove_curves",
+    "insert_cds",
+    "insert_index",
+    "insert_tranche",
+)
+
 
 def convert(x):
     try:
@@ -112,17 +120,6 @@ def get_current_tickers(database, workdate):
     return get_markit_bbg_mapping(database, basketids, workdate)
 
 
-def get_defaulted(mappings, default_table, workdate):
-    for bbg_id, _ in mappings:
-        if event_date := default_table.get(bbg_id, False):
-            if workdate >= event_date:
-                defaulted = event_date
-                break
-    else:
-        defaulted = None
-    return defaulted
-
-
 def csv_file_gen(workdate):
     CDS_DIR = BASE_DIR / "Tranche_data" / "CDS" / f"{workdate:%Y}"
     csv_file = CDS_DIR / f"{workdate}_fixed.csv.lz4"
@@ -218,29 +215,32 @@ def insert_cds(database, workdate: datetime.date):
             coupon_rates = np.array(
                 [convert(line[c]) / 100 for c in col_spread]
             )
-            defaulted = get_defaulted(mappings, default_table, workdate)
-            try:
-                sc = SpreadCurve(
-                    workdate,
-                    yc_dict[k.currency],
-                    None,
-                    None,
-                    None,
-                    tenors,
-                    coupon_rates,
-                    upfront_rates,
-                    recovery_rates,
-                    ticker=k.ticker,
-                    seniority=seniority_mapping[k.tier],
-                    doc_clause=DocClause[k.short_code],
-                    defaulted=defaulted,
-                )
-            except ValueError:
-                logging.error(f"couldn't build curve for {k.ticker}")
-            else:
-                buf = sc.as_buffer(True)
-                for (cid, sen), curves in mappings:
+            for (cid, sen), curves in mappings:
+                defaulted = None
+                if event_date := default_table.get(cid, False):
+                    if workdate >= event_date:
+                        defaulted = event_date
+                try:
+                    sc = SpreadCurve(
+                        workdate,
+                        yc_dict[k.currency],
+                        None,
+                        None,
+                        None,
+                        tenors,
+                        coupon_rates,
+                        upfront_rates,
+                        recovery_rates,
+                        ticker=k.ticker,
+                        seniority=seniority_mapping[k.tier],
+                        doc_clause=DocClause[k.short_code],
+                        defaulted=defaulted,
+                    )
+                except ValueError:
+                    logging.error(f"couldn't build curve for {k.ticker}")
+                else:
+                    buf = sc.as_buffer(True)
                     c.execute(
                         "INSERT INTO cds_curves VALUES(%s, %s, %s, %s, %s) "
                         "ON CONFLICT (date, company_id, seniority) "
@@ -263,7 +263,7 @@ def insert_cds(database, workdate: datetime.date):
                             for t, upf in zip(curves, upfront_rates)
                         ],
                    )
-            tickers_found.add(k)
+                    tickers_found.add(k)
     database.commit()
 
     # handle missing tickers
@@ -288,6 +288,17 @@ def insert_cds(database, workdate: datetime.date):
                 logger.error(f"{curve_key.full_ticker} never existed")
             else:
                 if (workdate - date).days < 20:  # we copy over the old curve
+                    # check if there was an event of default
+                    # in that case, mark the curve as defaulted
+                    sc = SpreadCurve.from_bytes(curve, True)
+                    if not sc.defaulted:
+                        defaulted = None
+                        if event_date := default_table.get(cid, False):
+                            if workdate >= event_date:
+                                defaulted = event_date
+                        if defaulted:
+                            sc.default_date = defaulted
+                            curve = sc.as_buffer(True)
                     c.execute(
                         "INSERT INTO cds_curves VALUES(%s, %s, %s, %s, %s) "
                         "ON CONFLICT (date, company_id, seniority) "
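
The per-company default check (look up the company id in default_table and treat it as defaulted only when the event date falls on or before workdate) now appears twice in insert_cds. A small helper along these lines, hypothetical and not part of the patch, would capture that pattern in one place:

import datetime
from typing import Optional


def default_date_for(cid, default_table, workdate: datetime.date) -> Optional[datetime.date]:
    # Hypothetical refactor, not in the diff. Unlike the removed get_defaulted(),
    # which scanned every (bbg_id, _) pair in `mappings`, this answers the question
    # for a single company id, which is what both the new per-(cid, sen) loop and
    # the copy-forward branch need.
    event_date = default_table.get(cid)
    if event_date is not None and workdate >= event_date:
        return event_date
    return None

Both call sites could then read defaulted = default_date_for(cid, default_table, workdate) instead of repeating the walrus-operator block.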
