import io
import logging
import os
import shutil
import time
import zipfile

import lz4.frame
import requests
from pandas.tseries.offsets import BDay

from serenitas.utils.env import BASE_DIR

logger = logging.getLogger(__name__)


def convertToNone(v):
    return v if v else None
_MARKIT_CDS_URL = "https://www.cds.ihsmarkit.com/export.jsp"


def download_cds_data(payload, workdate, report="FIXED_COUPON", file_format="csv"):
    # The payload dictates the actual file format, overriding the default argument.
    file_format = payload["format"]
    payload.update({"report": report})
    r = requests.post(_MARKIT_CDS_URL, params=payload)
    content = io.BytesIO(r.content)
    save_dir = BASE_DIR / "Tranche_data" / "CDS" / f"{workdate:%Y}"
    save_dir.mkdir(parents=True, exist_ok=True)
    suffix = "fixed" if report == "FIXED_COUPON" else "parspread"
    data_file = save_dir / f"{workdate}_{suffix}.{file_format}.lz4"
    try:
        with zipfile.ZipFile(content) as z:
            # Stream the matching report out of the zip and re-compress it with lz4.
            with lz4.frame.open(data_file, "wb") as f2:
                for f in z.namelist():
                    if f.endswith(file_format):
                        with z.open(f) as f1:
                            if report == "FIXED_COUPON":
                                # Skip the first two (header) lines of the fixed-coupon report.
                                next(f1)
                                next(f1)
                            shutil.copyfileobj(f1, f2)
    except zipfile.BadZipFile:
        # On failure the server returns a plain-text error message instead of a zip archive.
        logger.error(content.getvalue().decode().strip())
def download_composite_data(
    payload, workdate, historical=False, reports=("COMPOSITES", "TRANCHE_COMPOSITES")
):
    # If historical, maintain the invariant mtime(f) == payload['date'] + BDay(1).
    if historical:
        ts = (workdate + BDay(1)).timestamp()
    for report in reports:
        for family in ["CDX", "ITRAXX-EUROPE"]:
            payload.update({"family": family, "report": report})
            retry = 0
            while retry < 10:
                r = requests.post(_MARKIT_CDS_URL, params=payload)
                try:
                    with zipfile.ZipFile(io.BytesIO(r.content)) as z:
                        for f in z.namelist():
                            if f.endswith(payload["format"]):
                                path = z.extract(
                                    f,
                                    path=BASE_DIR
                                    / "Tranche_data"
                                    / "Composite_reports",
                                )
                                if historical:
                                    os.utime(path, (ts, ts))
                except zipfile.BadZipFile:
                    # A bad archive is treated as a transient server error: log the
                    # response, wait, and retry (up to 10 attempts).
                    logger.error(r.content.decode().strip())
                    time.sleep(5)
                    retry += 1
                    continue
                else:
                    break
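

# Hypothetical usage sketch (an assumption, not part of the original module): it only
# illustrates how the two download functions might be driven. The "format" and "date"
# payload keys appear in the code and comments above; any authentication or other
# fields required by the Markit export endpoint are omitted and depend on account setup.
if __name__ == "__main__":
    import datetime

    workdate = datetime.date(2024, 1, 2)  # assumed to be a date-like object
    payload = {
        "format": "csv",                 # consumed by both download functions
        "date": f"{workdate:%Y-%m-%d}",  # assumed wire format for the date field
        # ... credentials and any other required Markit parameters go here ...
    }
    download_cds_data(payload, workdate, report="FIXED_COUPON")
    download_composite_data(payload, workdate, historical=True)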