aboutsummaryrefslogtreecommitdiffstats
path: root/python/markit/__main__.py
blob: fdf9a86eff751a00d90149fe544b24072aac6b0d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
import argparse
import datetime
import logging
import numpy as np
import pandas as pd
import os
import sys
import logging

from common import root
from .cds import download_cds_data, download_composite_data
from .loans import download_facility, insert_facility, download_marks, update_facility
from .rates import downloadMarkitIRData
from .import_quotes import insert_cds, insert_index, insert_tranche
from pandas.tseries.offsets import BDay
from sqlalchemy import create_engine
from .utils import default_date
from db import dbconn

## parse arguments
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-l", "--loans", action="store_true",
                   help="download markit loan data")
group.add_argument("-c", "--cds", action="store_true",
                   help="download markit cds data")
group.add_argument("-r", "--rates", action="store_true",
                   help="download markit IR data")
parser.add_argument("-i", "--insert-only", action="store_true",
                    help="do not re-download data")
# Optional positional work date in YYYY-MM-DD form. NOTE: the original used
# pd.datetime.strptime, but `pandas.datetime` was deprecated in pandas 0.25
# and removed in 1.0 -- use the stdlib datetime module (imported above).
parser.add_argument('workdate', nargs='?',
                    type=lambda s: datetime.datetime.strptime(s, "%Y-%m-%d").date())

args = parser.parse_args()
# `historical` is True only when the caller supplied an explicit (back-dated)
# workdate; otherwise we fall back to the project's default business date.
historical = True
if args.workdate is None:
    historical = False
    workdate = default_date()
else:
    workdate = args.workdate

## pick the log file for the selected mode; exactly one of the three flags
## is set because the argparse group is required and mutually exclusive
if args.loans:
    log_name = 'markit_loans.log'
elif args.cds:
    log_name = 'markit_cds.log'
elif args.rates:
    log_name = 'markit_rates.log'
log_file = os.path.join(root, 'logs', log_name)

## configure the 'markit' logger to append timestamped records to that file
logger = logging.getLogger('markit')
handler = logging.FileHandler(filename=log_file)
handler.setFormatter(
    logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.INFO)

if args.loans:
    # SECURITY NOTE(review): plaintext credentials checked into source --
    # these should live in a config file or environment variables.
    payload={'LEGALENTITY': 'lmcg',
             'USERNAME': 'serecapuser',
             'PASSWORD': 'Welcome1'}

    # Download and insert the facility list, then the marks for workdate.
    download_facility(workdate, payload)
    logger.info('facility downloaded')
    insert_facility(workdate)
    logger.info('facility inserted')
    # RELATIVE is the business-day count from the default date to workdate
    # (presumably a day offset understood by the marks endpoint -- confirm
    # against download_marks; non-positive when backfilling).
    payload.update({'RELATIVE': np.busday_count(default_date(), workdate)})
    download_marks(workdate, payload)
    logger.info('marks downloaded')
    payload.pop('RELATIVE')
    update_facility(workdate, payload)
    logger.info('facility updated')

elif args.cds:
    # SECURITY NOTE(review): plaintext credentials checked into source.
    payload = {'user': 'GuillaumeHorel',
               'password': 'password',
               'version': '5',
               'format': 'csv',
               'report': 'FIXED_COUPON',
               'date': workdate.strftime("%Y%m%d"),
               'type': 'CDS'}
    # With --insert-only we skip the downloads and only load existing files.
    if not args.insert_only:
        download_cds_data(payload)
        # Composite (credit index) data uses a different report type/version.
        payload.update({'type':'CredIndex','version':4})
        download_composite_data(payload, historical)
    serenitasdb = dbconn('serenitasdb')
    insert_cds(serenitasdb, workdate)
    serenitasdb.close()
    if not args.insert_only:
        # Index and tranche inserts go through a SQLAlchemy engine rather
        # than the dbconn wrapper used for CDS above.
        engine = create_engine('postgresql://serenitas_user@debian/serenitasdb')
        insert_index(engine, workdate)
        insert_tranche(engine, workdate)

elif args.rates:
    # Interest-rate curve data is fetched once per currency.
    for curr in ["USD", "EUR"]:
        downloadMarkitIRData(workdate, currency = curr)
        logger.info("Downloaded {} rates".format(curr))