# python/markit/__main__.py
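"""Command-line entry point: download and import Markit loan, CDS and interest-rate data."""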
import argparse
import logging
import time
from datetime import datetime

import numpy as np

from .cds import download_cds_data, download_composite_data
from .loans import (
    download_facility,
    insert_facility,
    download_marks,
    update_facility,
    download_recupdates,
)
from .rates import downloadMarkitIRData
from .import_quotes import insert_cds, insert_index, insert_tranche
from pandas.tseries.offsets import BDay
from sqlalchemy import create_engine
from utils import SerenitasFileHandler
from utils.db import dbconn

# parse arguments
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument(
    "-l", "--loans", action="store_true", help="download markit loan data"
)
group.add_argument("-c", "--cds", action="store_true", help="download markit cds data")
group.add_argument("-r", "--rates", action="store_true", help="download markit IR data")
parser.add_argument(
    "-i", "--insert-only", action="store_true", help="do not re-download data"
)
parser.add_argument(
    "workdate", nargs="?", type=lambda s: pd.datetime.strptime(s, "%Y-%m-%d").date()
)


def default_date():
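    """Return the previous business day as a date."""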
    return (datetime.today() - BDay(1)).date()


args = parser.parse_args()
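# Default the working date: rates use today's date, loans/CDS use the previous
# business day; only a defaulted loans/CDS run is treated as non-historical.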
historical = True
if args.workdate is None:
    if not args.rates:
        historical = False
        workdate = default_date()
    else:
        workdate = datetime.today().date()
else:
    workdate = args.workdate

if args.loans:
    fh = SerenitasFileHandler("markit_loans.log")
elif args.cds:
    fh = SerenitasFileHandler("markit_cds.log")
elif args.rates:
    fh = SerenitasFileHandler("markit_rates.log")

# set up logging
logger = logging.getLogger("markit")
logger.addHandler(fh)
logger.setLevel(logging.INFO)

if args.loans:
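    # Loans workflow: download and insert the facility file, then pull marks,
    # facility updates and recommended updates for the workdate.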
    payload = {"LEGALENTITY": "lmcg", "USERNAME": "serecapuser", "PASSWORD": "Welcome1"}

    download_facility(workdate, payload)
    logger.info("facility downloaded")
    insert_facility(workdate)
    logger.info("facility inserted")
    payload.update({"RELATIVEVERSION": np.busday_count(default_date(), workdate)})
    download_marks(workdate, payload)
    logger.info("marks downloaded")
    payload.pop("RELATIVEVERSION")
    update_facility(workdate, payload)
    logger.info("facility updated")
    download_recupdates(workdate, payload)
    logger.info("recommanded updates downloaded")

elif args.cds:
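    # CDS workflow: unless --insert-only, download the fixed-coupon single-name
    # report and the CredIndex composites; CDS quotes are always inserted, index
    # and tranche quotes only after a fresh download.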
    payload = {
        "user": "GuillaumeHorel",
        "password": "password",
        "version": "5",
        "format": "csv",
        "report": "FIXED_COUPON",
        "date": workdate.strftime("%Y%m%d"),
        "type": "CDS",
    }
    if not args.insert_only:
        download_cds_data(payload)
        payload.update({"type": "CredIndex", "version": 4})
        download_composite_data(payload, historical)
    serenitasdb = dbconn("serenitasdb")
    insert_cds(serenitasdb, workdate)
    serenitasdb.close()
    if not args.insert_only:
        engine = create_engine("postgresql://serenitas_user@debian/serenitasdb")
        insert_index(engine, workdate)
        insert_tranche(engine, workdate)

elif args.rates:
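    # Rates workflow: download each currency's interest-rate data, retrying up
    # to 10 times with a 30-second pause on failure.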
    for curr in ["USD", "EUR", "JPY"]:
        retry = 0
        while retry < 10:
            try:
                downloadMarkitIRData(workdate, currency=curr)
            except ValueError as e:
                logger.error(e)
                logger.error(f"Could not download {curr} rates for date {workdate}")
                time.sleep(30)
                retry += 1
            else:
                logger.info(f"Downloaded {curr} rates")
                break