import io
import logging
import os
import shutil
import time
import zipfile

import pandas as pd
import requests
from pandas.tseries.offsets import BDay

from env import BASE_DIR

logger = logging.getLogger(__name__)


def convertToNone(v):
    return v if v else None


def download_cds_data(payload):
    r = requests.post("https://www.markit.com/export.jsp", params=payload)
    content = io.BytesIO(r.content)
    try:
        with zipfile.ZipFile(content) as z:
            fname = BASE_DIR / "Tranche_data" / "CDS" / f"cds eod {payload['date']}.csv"
            with fname.open("wb") as f2:
                for f in z.namelist():
                    if "csv" in f:
                        f1 = z.open(f)
                        # Skip the two header lines before copying the rows.
                        next(f1)
                        next(f1)
                        shutil.copyfileobj(f1, f2)
                        f1.close()
    except zipfile.BadZipFile:
        # The response was not a zip archive; log its body for debugging.
        logger.error(content.getvalue().decode())


def download_composite_data(payload, historical=False):
    # If historical, maintain the invariant mtime(f) == payload['date'] + BDay(1).
    if historical:
        ts = (pd.Timestamp(payload["date"]) + BDay(1)).timestamp()
    for report in ["COMPOSITES", "TRANCHE_COMPOSITES"]:
        for family in ["CDX", "ITRAXX-EUROPE"]:
            payload.update({"family": family, "report": report})
            while True:
                r = requests.post("https://www.markit.com/export.jsp", params=payload)
                try:
                    with zipfile.ZipFile(io.BytesIO(r.content)) as z:
                        for f in z.namelist():
                            if "csv" in f:
                                path = z.extract(
                                    f,
                                    path=os.path.join(
                                        os.environ["BASE_DIR"],
                                        "Tranche_data",
                                        "Composite_reports",
                                    ),
                                )
                                if historical:
                                    os.utime(path, (ts, ts))
                except zipfile.BadZipFile:
                    # Bad or partial download: log the body, wait, and retry.
                    logger.error(r.content.decode())
                    time.sleep(5)
                    continue
                else:
                    break
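

# A minimal usage sketch, not part of the original module: it assumes the
# Markit export endpoint is driven entirely by the `payload` dict and that
# the caller supplies any credentials or extra parameters the endpoint
# requires (omitted here). The date shown is purely illustrative.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    payload = {"date": "2024-01-02"}  # hypothetical trade date; add auth fields as needed
    download_cds_data(payload)
    download_composite_data(payload, historical=True)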