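# One-off loader: backfill HY17 single-name CDS quotes from the calibration CSV
# dumps into the cds_quotes table (one row per tenor and curve ticker, source 'MKIT').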
import os
import common
import csv
import datetime
from utils.db import serenitasdb
import pdb
from import_cds_quotes import get_current_tickers
import psycopg2
def convert(x):
    """Parse a float, returning None for blank or invalid fields."""
    try:
        return float(x)
    except (TypeError, ValueError):
        return None
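# Map each HY17 index member's ticker to its CDS curve tickers as of 2014-05-10;
# the first curve point is dropped so the remainder lines up with the 1Y-10Y tenors below.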
sqlstr = "select markit_ticker, cds_curve from index_members(%s, %s)"
with serenitasdb.cursor() as c:
    c.execute(sqlstr, ('HY17', datetime.date(2014, 5, 10)))
    bbg_markit_mapping = {a: b[1:] for a, b in c}
basedir = "/home/share/CorpCDOs/Scenarios/Calibration"
filelist = [f for f in os.listdir(basedir) if "hy17_singlenames" in f]
sqlstr = "INSERT INTO cds_quotes(date, curve_ticker, upfrontbid, upfrontask," \
         "runningbid, runningask, source, recovery) VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
tenors = [str(i)+'Y' for i in [1, 2, 3, 4, 5, 7, 10]]
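# Load each per-date calibration file; dates already loaded are skipped via the
# IntegrityError handler below.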
for f in filelist:
    print(f)
    date = f[17:27]  # quote date embedded in the file name
    with open(os.path.join(basedir, f)) as fh:
        csvreader = csv.DictReader(fh)
        c = serenitasdb.cursor()
        for line in csvreader:
            if not line['ticker']:
                continue
            # One row per tenor; the file carries a single mark, so bid == ask.
            toinsert = [(date, ticker, convert(line[tenor]), convert(line[tenor]),
                         float(line['running']), float(line['running']), 'MKIT',
                         float(line['recovery']) / 100) for tenor, ticker in
                        zip(tenors, bbg_markit_mapping[line['ticker']])]
            try:
                c.executemany(sqlstr, toinsert)
                serenitasdb.commit()
            except psycopg2.IntegrityError:
                print("%s already in there for date %s" % (line['ticker'], date))
                serenitasdb.rollback()
                continue
        c.close()
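# The blocks below are older one-off backfills (HY19/HY21 single names and IG
# tranche quotes), kept commented out for reference; note that `tm` and `conn`
# are not defined in the current script.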
# rootdir = os.path.join(common.root, "Tranche_Data")
# filelist = [f for f in os.listdir(rootdir) if "hy21_singlenames" in f or "hy19_singlenames" in f]
# sqlstr = "INSERT INTO cds_quotes(date, curve_ticker, upfrontbid, upfrontask," \
#          "runningbid, runningask, source, recovery) VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
# sqlstr2 = "SELECT curve_ticker FROM cds_quotes where date=%s"
# sqlstr3 = "DELETE from cds_quotes where date=%s and curve_ticker=%s"
# tenor = ['6M'] + [str(i)+'Y' for i in [1, 2, 3, 4, 5, 7, 10]]
# for f in filelist:
#     date = datetime.datetime.strptime(f[17:27], "%Y-%m-%d").date()
#     with serenitasdb.cursor() as c:
#         c.execute(sqlstr2, (date,))
#         l = set([t[0] for t in c])
#     print(f)
#     with open(os.path.join(rootdir, f)) as fh:
#         csvreader = csv.DictReader(fh)
#         for line in csvreader:
#             if line['ticker'] == '':
#                 continue
#             knowntickers = [(date, t) for t in tm[line['ticker']][1:6] if t in l]
#             unknowntickers = [(tenor[i], t) for i, t in enumerate(tm[line['ticker']][1:6], 1)]
#             with serenitasdb.cursor() as c:
#                 c.executemany(sqlstr3, knowntickers)
#                 serenitasdb.commit()
#             toinsert = [(date, ticker, convert(line[tenor]), convert(line[tenor]),
#                          float(line['running']), float(line['running']), 'MKIT',
#                          float(line['recovery'])/100) for tenor, ticker in unknowntickers]
#             with serenitasdb.cursor() as c:
#                 c.executemany(sqlstr, toinsert)
#                 serenitasdb.commit()
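# Legacy backfill of IG tranche quotes into tranche_quotes, first from the
# "CDXNAIG Tranches" CSV and then from the old quotes table.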
# tenord = {'3Y': '3yr', '5Y': '5yr', '7Y': '7yr', '10Y': '10yr'}
# sqlstr = "INSERT INTO tranche_quotes VALUES({0})".format(",".join(["%s"]*17))
# # with open("../../CDXNAIG Tranches.csv") as fh:
# #     csvreader = csv.DictReader(fh)
# #     toinsert = [(datetime.datetime.strptime(line['Date'], "%m/%d/%Y"), 'IG', line['Index Series'],
# #                  line['Index Version'], tenord[line['Index Term']], int(float(line['Attachment'])*100),
# #                  int(float(line['Detachment'])*100), convert(line['Tranche Upfront Mid']),
# #                  convert(line['Tranche Spread Mid']),
# #                  0, float(line['IndexRefSpread'])*10000, None, None, None, None, None, 'MKIT')
# #                 for line in csvreader if line['Date'] != '7/5/2010']
# # with conn.cursor() as c:
# #     c.executemany(sqlstr, toinsert)
# #     conn.commit()
# with conn.cursor() as c:
#     c.execute("SELECT * FROM quotes where index='IG'")
#     toinsert = [tuple(r[:7]) + (r['upfront'], r['running']) + (None, r['refbasketprice']) + tuple(r[10:]) for r in c]
# with conn.cursor() as c:
#     c.executemany(sqlstr, toinsert)
#     conn.commit()