aboutsummaryrefslogtreecommitdiffstats
path: root/python/load_loanprices_data.py
blob: 28bf2dc36ebb7b72b127fd3be133c843f91f26be (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
import blpapi
import sys
from sqlalchemy import create_engine, MetaData, Table
import pandas as pd

# --- Database + Bloomberg session setup (module-level side effects) ---
# NOTE(review): this script is Python 2 (print statements, iteritems below).
# NOTE(review): DB credentials are hard-coded in the URL - consider config.
engine = create_engine('postgresql://et_user:Serenitas1@debian/ET')
metadata = MetaData(bind=engine)

# Fill SessionOptions
sessionOptions = blpapi.SessionOptions()
sessionOptions.setServerHost('192.168.1.108')
sessionOptions.setServerPort(8194)
session = blpapi.Session(sessionOptions)

# Start a Session
if not session.start():
    # NOTE(review): exits with status 0 even on failure; a non-zero code
    # would be more conventional for callers/cron.
    print "Failed to start session."
    sys.exit(0)

if not session.openService("//blp/refdata"):
    print "Failed to open //blp/refdata"
    sys.exit(0)

# Build a ReferenceDataRequest against the reference-data service.
refDataService = session.getService("//blp/refdata")
request = refDataService.createRequest("ReferenceDataRequest")
# Bloomberg field mnemonics requested per security (loan pricing/reference
# attributes; renamed to database column names further below).
fields = ["ID_BB_UNIQUE", "ISSUE_DT",
          "SECURITY_TYP", "AMT_OUTSTANDING", "PX_LAST","LAST_UPDATE_DT",
          "ISSUER","MATURITY","CPN","CPN_TYP", "CPN_FREQ","FLT_SPREAD",
          "LIBOR_FLOOR","LN_CURRENT_MARGIN", "LN_TRANCHE_SIZE",
          "LN_COVENANT_LITE","SECOND_LIEN_INDICATOR","DEFAULTED", "PRICING_SOURCE"]

# append securities to request
#cusips = pd.read_sql_query("select distinct cusip from bloomberg_corp_ref where tranche_size is Null", engine)

def split_clean(l):
    """Split one CSV line of the form "cusip,loanxid" into a 2-tuple.

    Trailing whitespace/newline is stripped; an empty second field is
    mapped to None (so it loads as SQL NULL downstream).

    Raises ValueError if the line does not contain exactly one comma.
    """
    # Bug fix: the original body read the module-level ``line`` variable
    # instead of its own parameter ``l`` - it only worked because it
    # happened to be called as ``split_clean(line)`` from a Python 2 list
    # comprehension whose loop variable leaks into module scope.
    a, b = l.rstrip().split(",")
    if b == '':
        b = None
    return (a, b)

# Load the cusip -> loanxid mapping file (one "cusip,loanxid" row per line).
with open("/home/share/CorpCDOs/data/bbg_loanxid.csv") as fh:
    mapping = dict([split_clean(line) for line in fh])
# Transpose the mapping into two parallel tuples: (cusips, loanxids).
# NOTE(review): Python 2 only - dict.iteritems() does not exist in Python 3.
mapping = zip(*[(k, v) for k, v in mapping.iteritems()])
cusips = mapping[0]
loanxids = mapping[1]
# NOTE(review): ``loanxids`` is never used anywhere below, yet later code
# references a 'loanxid' column that is not in ``fields`` - verify whether a
# join of loanxids onto the result frame was intended here.

# cusips = set(cusips)
for cusip in cusips:
    # Bloomberg security identifier form "<cusip> Corp" for corporates.
    request.append("securities", "{0} Corp".format(cusip))

# append fields to request
for field in fields:
    request.append("fields", field)

session.sendRequest(request)

# Drain the event queue, collecting every partial/final response message.
data = []
try:
    # Process received events
    while(True):
        # We provide timeout to give the chance to Ctrl+C handling:
        ev = session.nextEvent(500)
        if ev.eventType() in [blpapi.Event.PARTIAL_RESPONSE, blpapi.Event.RESPONSE]:
            for msg in ev:
                data.append(msg)
            # Response completely received, so we could exit
        if ev.eventType() == blpapi.Event.RESPONSE:
            break
finally:
    # Stop the session
    session.stop()
# NOTE(review): the collected messages are processed after session.stop();
# confirm blpapi keeps message payloads valid once the session is stopped.

def process_msgs(data, fields):
    """Flatten Bloomberg ReferenceDataResponse messages into a DataFrame.

    Parameters
    ----------
    data : list of blpapi messages collected from the event loop.
    fields : list of Bloomberg field mnemonics to extract per security.

    Returns
    -------
    pandas.DataFrame with one row per security: a 'security' column holds
    the security identifier and each requested field becomes a column,
    with None where Bloomberg did not return that field.
    """
    newdata = []
    for msg in data:
        # Skip non-data messages (e.g. session/service status events).
        if msg.messageType() == blpapi.Name("ReferenceDataResponse"):
            securityDataArray = msg.getElement("securityData")
            for securityData in securityDataArray.values():
                securityName = securityData.getElementValue("security")
                fieldData = securityData.getElement("fieldData")
                row = {'security': securityName}
                for fieldName in fields:
                    try:
                        row[fieldName] = fieldData.getElementValue(fieldName)
                    except blpapi.NotFoundException:
                        # Field absent for this security -> SQL NULL later.
                        row[fieldName] = None
                newdata.append(row)
    # Fix: DataFrame.from_dict is documented for dict input; for a list of
    # row-dicts the plain DataFrame constructor is the documented call
    # (recent pandas rejects non-dict data in from_dict).
    return pd.DataFrame(newdata)

# Flatten the raw Bloomberg messages and tidy the frame so it ends up keyed
# by (cusip, pricingdate) with lowercase database column names.
data = process_msgs(data, fields)

# Keep only rows for which Bloomberg actually returned reference data.
data = data[data.ISSUER.notnull()]

# The security identifier comes back as "<cusip> Corp"; keep the leading
# nine characters (the cusip itself).
data.security = data.security.str[:9]

# Map Bloomberg mnemonics onto our database column names.
column_names = {'PX_LAST': 'Price',
                'CPN': 'Coupon',
                'CPN_TYP': 'CouponType',
                'CPN_FREQ': 'Frequency',
                'FLT_SPREAD': 'Spread',
                'LN_CURRENT_MARGIN': 'loan_margin',
                'LN_COVENANT_LITE': 'covlite',
                'SECOND_LIEN_INDICATOR': 'secondlien',
                'PRICING_SOURCE': 'Source',
                'AMT_OUTSTANDING': 'amount_outstanding',
                'SECURITY_TYP': 'security_type',
                'LAST_UPDATE_DT': 'PricingDate',
                'security': 'Cusip',
                'LN_TRANCHE_SIZE': 'tranche_size'}
data = data.rename(columns=column_names)
# Normalise every remaining column name to lowercase.
data.columns = [name.lower() for name in data.columns]

# Rows without a pricing date cannot be keyed - drop them.
data = data[data.pricingdate.notnull()]
data = data.set_index(['cusip', 'pricingdate'])

# --- Persist results to Postgres ---
# NOTE(review): a second engine is created although ``engine`` from the top
# of the script is still live - looks like a leftover; verify.
engine = create_engine('postgresql://et_user:Serenitas1@debian/ET')
# Identifiers already present in the reference table.
# NOTE(review): this builds an Index from a one-column DataFrame of
# id_bb_unique values, but ``data`` is indexed by (cusip, pricingdate), so
# the difference() below likely does not filter what is intended - confirm.
currentdata = pd.Index(pd.read_sql_query("select id_bb_unique from bloomberg_corp_ref", engine, parse_dates="pricingdate"))
# Keep only rows whose index is not already in the database.
# NOTE(review): .ix is deprecated and removed in modern pandas (use .loc).
data = data.ix[data.index.difference(currentdata)]
data = data.reset_index()
# Append the time-series slice (price, margin, etc.) to bloomberg_corp.
data[['id_bb_unique', 'pricingdate', 'price', 'loan_margin', 'amount_outstanding', 'defaulted', 'source']].to_sql("bloomberg_corp", engine, if_exists='append', index=False)
# Wrap scalar ids in one-element lists (target columns are presumably
# Postgres arrays - TODO confirm); falsy values become None -> SQL NULL.
# NOTE(review): 'loanxid' is not among the requested ``fields`` above, so
# this column may not exist on ``data`` at all - verify before running.
data[['loanxid','cusip']] = data[['loanxid','cusip']].applymap(lambda x: None if not x else [x])
records =data[['id_bb_unique', 'cusip', 'issuer', 'maturity', 'coupon', 'coupontype',
               'frequency', 'spread', 'libor_floor', 'tranche_size', 'covlite',
               'secondlien', 'security_type', 'issue_dt', 'loanxid']].to_dict(orient='records')

# Reflect the reference table and bulk-insert the new reference rows.
bloomberg_corp_ref = Table('bloomberg_corp_ref', metadata, autoload=True)
ins = bloomberg_corp_ref.insert()
engine.execute(ins, records)

# Bulk-update tranche_size in a single transaction (executemany over the
# list of (tranche_size, id_bb_unique) parameter tuples).
with engine.begin() as conn:
    conn.execute("update bloomberg_corp_ref set tranche_size=%s where id_bb_unique=%s",
                 [(a, b) for a, b in data[['tranche_size', 'id_bb_unique']].to_records(index=False)])

# NOTE(review): this loop deletes every reference row that was just inserted
# and updated above, which makes the insert pointless - looks like leftover
# debug/cleanup code; confirm intent before running in production.
for id_bb in data.id_bb_unique:
    engine.execute("delete from bloomberg_corp_ref where id_bb_unique=%s", (id_bb,))