1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
|
import argparse
import datetime
import logging
import os

import numpy as np
import pandas as pd
from sqlalchemy import create_engine

from .cds import download_cds_data, download_composite_data
from .import_quotes import insert_cds, insert_index, insert_tranche
from .loans import download_facility, insert_facility, download_marks, update_facility
from .rates import downloadMarkitIRData
from .utils import default_date
from utils.db import dbconn
# ---------------------------------------------------------------------------
# Command-line parsing: exactly one of --loans / --cds / --rates must be
# given, plus an optional positional workdate (YYYY-MM-DD).
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-l", "--loans", action="store_true",
                   help="download markit loan data")
group.add_argument("-c", "--cds", action="store_true",
                   help="download markit cds data")
group.add_argument("-r", "--rates", action="store_true",
                   help="download markit IR data")
parser.add_argument("-i", "--insert-only", action="store_true",
                    help="do not re-download data")
# BUGFIX: `pd.datetime` was deprecated in pandas 0.25 and removed in 1.0;
# use the stdlib datetime module directly to parse the workdate.
parser.add_argument('workdate', nargs='?',
                    type=lambda s: datetime.datetime.strptime(s, "%Y-%m-%d").date())
args = parser.parse_args()

# No explicit workdate means "run for the default (current) date" and the
# run is treated as non-historical; an explicit date implies a historical run.
historical = True
if args.workdate is None:
    historical = False
    workdate = default_date()
else:
    workdate = args.workdate

# One log file per data family, under $LOG_DIR (raises KeyError if unset).
if args.loans:
    log_file = os.path.join(os.environ['LOG_DIR'], 'markit_loans.log')
elif args.cds:
    log_file = os.path.join(os.environ['LOG_DIR'], 'markit_cds.log')
elif args.rates:
    log_file = os.path.join(os.environ['LOG_DIR'], 'markit_rates.log')

# Set up file logging for the 'markit' logger at INFO level.
logger = logging.getLogger('markit')
fh = logging.FileHandler(filename=log_file)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)
logger.setLevel(logging.INFO)
# Dispatch on the selected data family; exactly one branch runs because the
# argparse group above is required and mutually exclusive.
# NOTE(review): credentials are hard-coded in source below ('Welcome1',
# 'password') — these should be moved to environment variables or a secrets
# store; flagged here rather than changed to preserve behavior.
if args.loans:
    # Loan workflow: download the facility file, insert it, download marks
    # for the requested version, then update the facility records.
    payload = {'LEGALENTITY': 'lmcg',
               'USERNAME': 'serecapuser',
               'PASSWORD': 'Welcome1'}
    download_facility(workdate, payload)
    logger.info('facility downloaded')
    insert_facility(workdate)
    logger.info('facility inserted')
    # RELATIVEVERSION is the business-day offset from today's default date to
    # the requested workdate (0 for a same-day run, negative for a historical
    # one) — presumably how Markit versions the marks file; confirm with the
    # Markit loans API. The key is removed again before update_facility.
    payload.update({'RELATIVEVERSION': np.busday_count(default_date(), workdate)})
    download_marks(workdate, payload)
    logger.info('marks downloaded')
    payload.pop('RELATIVEVERSION')
    update_facility(workdate, payload)
    logger.info('facility updated')
elif args.cds:
    # CDS workflow: single-name fixed-coupon report first, then the index
    # composites; inserts always run, downloads only without --insert-only.
    payload = {'user': 'GuillaumeHorel',
               'password': 'password',
               'version': '5',
               'format': 'csv',
               'report': 'FIXED_COUPON',
               'date': workdate.strftime("%Y%m%d"),
               'type': 'CDS'}
    if not args.insert_only:
        download_cds_data(payload)
        # NOTE(review): 'version' switches from str '5' to int 4 here —
        # looks intentional (CredIndex uses report version 4) but the type
        # inconsistency is worth confirming against the Markit API.
        payload.update({'type': 'CredIndex', 'version': 4})
        download_composite_data(payload, historical)
    serenitasdb = dbconn('serenitasdb')
    insert_cds(serenitasdb, workdate)
    serenitasdb.close()
    if not args.insert_only:
        # Index/tranche inserts go through a SQLAlchemy engine rather than
        # the dbconn wrapper used above; engine is not explicitly disposed.
        engine = create_engine('postgresql://serenitas_user@debian/serenitasdb')
        insert_index(engine, workdate)
        insert_tranche(engine, workdate)
elif args.rates:
    # Rates workflow: one Markit IR download per supported currency.
    for curr in ["USD", "EUR", "JPY"]:
        downloadMarkitIRData(workdate, currency=curr)
        logger.info("Downloaded {} rates".format(curr))
|