path: root/python/markit/cds.py
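"""Fetch Markit end-of-day CDS data and (tranche) composite reports.

Both helpers hit Markit's export.jsp endpoint with a caller-supplied payload
and unpack the returned zip archives under the Tranche_data directory.
"""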
import datetime
import io
import logging
import os
import shutil
import zipfile

import requests
from pandas.tseries.offsets import BDay

from common import root

logger = logging.getLogger(__name__)

def convertToNone(v):
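    """Coerce falsy values (empty string, 0, etc.) to None."""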
    return v if v else None

def download_cds_data(payload):
    """Download the end-of-day CDS zip for payload['date'] and save its CSV to disk."""
    r = requests.get('https://www.markit.com/export.jsp', params=payload)
    out_path = os.path.join(root, "Tranche_data", "CDS",
                            "cds eod {0}.csv".format(payload['date']))
    with zipfile.ZipFile(io.BytesIO(r.content)) as z, open(out_path, "wb") as f2:
        for name in z.namelist():
            if "csv" in name:
                with z.open(name) as f1:
                    # Drop the first two lines of the member CSV, then stream the rest to disk.
                    next(f1)
                    next(f1)
                    shutil.copyfileobj(f1, f2)

def download_composite_data(payload, historical=False):
    """Download composite and tranche-composite reports for CDX and ITRAXX-EUROPE.

    If historical, maintain the invariant mtime(file) == payload['date'] + BDay(1).
    """
    if historical:
        ts = datetime.datetime.strptime(payload['date'], "%Y%m%d") + BDay(1)
        ts = ts.timestamp()
    for report in ['COMPOSITES', 'TRANCHE_COMPOSITES']:
        for family in ['CDX', 'ITRAXX-EUROPE']:
            payload.update({'family': family, 'report': report})
            r = requests.get('https://www.markit.com/export.jsp', params=payload)
            try:
                with zipfile.ZipFile(io.BytesIO(r.content)) as z:
                    for f in z.namelist():
                        if "csv" in f:
                            path = z.extract(f, path=os.path.join(root, "Tranche_data", "Composite_reports"))
                            if historical:
                                os.utime(path, (ts, ts))
            except zipfile.BadZipFile:
                # The endpoint returned something other than a zip (e.g. an error page); log it and move on.
                logger.error(r.content)
                continue
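
# Minimal usage sketch (not part of the original module). The payload below is
# an assumption: the functions above only rely on 'date' (YYYYMMDD), plus
# whatever credentials/parameters Markit's export.jsp endpoint requires.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    payload = {'date': '20160104'}  # hypothetical date; add credentials here as needed
    download_cds_data(dict(payload))
    download_composite_data(dict(payload), historical=True)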