path: root/python/load_loanprices_data.py
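"""Refresh loan data from Bloomberg.

Requests updated status and pricing fields for the loans in bloomberg_corp_ref via
the Bloomberg API (blpapi), updates their status columns, and inserts any new price
observations into bloomberg_corp.
"""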
import blpapi
import sys
from sqlalchemy import create_engine, MetaData, Table
import pandas as pd
from db import conn

engine = create_engine('postgresql://et_user:Serenitas1@debian/ET')
metadata = MetaData(bind=engine)

# Fill SessionOptions
sessionOptions = blpapi.SessionOptions()
sessionOptions.setServerHost('192.168.1.108')
sessionOptions.setServerPort(8194)
session = blpapi.Session(sessionOptions)

# Start a Session
if not session.start():
    print("Failed to start session.")
    sys.exit(1)

if not session.openService("//blp/refdata"):
    print("Failed to open //blp/refdata")
    sys.exit(1)

refDataService = session.getService("//blp/refdata")
request = refDataService.createRequest("ReferenceDataRequest")
all_fields = ["ISSUE_DT", "LN_ISSUE_STATUS", "ID_CUSIP",
              "SECURITY_TYP", "AMT_OUTSTANDING", "PX_LAST","LAST_UPDATE_DT",
              "ISSUER", "MATURITY","CPN","CPN_TYP", "CPN_FREQ","FLT_SPREAD",
              "LIBOR_FLOOR","LN_CURRENT_MARGIN", "LN_TRANCHE_SIZE", "AMT_ISSUED",
              "LN_COVENANT_LITE","SECOND_LIEN_INDICATOR","DEFAULTED", "DEFAULT_DATE",
              "CALLED", "CALLED_DT", "PRICING_SOURCE"]
fields_update = ["LN_ISSUE_STATUS", "AMT_OUTSTANDING", "PX_LAST","LAST_UPDATE_DT",
                 "LN_CURRENT_MARGIN", "DEFAULTED", "DEFAULT_DATE",
                 "CALLED", "CALLED_DT", "PRICING_SOURCE"]

# append securities to request
cusips = pd.read_sql_query("select id_bb_unique, substring(id_bb_unique from 3) as cusip from bloomberg_corp_ref " \
                           "where (status is Null or status not in ('REFINANCED','RETIRED', 'REPLACED')) "\
                           "and not called", engine, index_col='cusip')
cusips = pd.read_sql_query("select id_bb_unique, substring(id_bb_unique from 3) as cusip from bloomberg_corp_ref", engine, index_col='cusip')

# cusips = set(cusips)
for cusip in cusips.index:
    request.append("securities", "{0} Corp".format(cusip))

# append fields to request
for field in fields_update:
    request.append("fields", field)

session.sendRequest(request)

data = []
try:
    # Process received events
    while(True):
        # We provide timeout to give the chance to Ctrl+C handling:
        ev = session.nextEvent(500)
        if ev.eventType() in [blpapi.Event.PARTIAL_RESPONSE, blpapi.Event.RESPONSE]:
            for msg in ev:
                data.append(msg)
        # A RESPONSE event means the response has been completely received, so we can exit
        if ev.eventType() == blpapi.Event.RESPONSE:
            break
finally:
    # Stop the session
    session.stop()

def process_msgs(data, fields):
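    """Flatten Bloomberg ReferenceDataResponse messages into one row per security."""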
    newdata = []
    for msg in data:
        if msg.messageType() == blpapi.Name("ReferenceDataResponse"):
            securityDataArray = msg.getElement("securityData")
            for securityData in securityDataArray.values():
                securityName = securityData.getElementValue("security")
                fieldData = securityData.getElement("fieldData")
                row = {'security': securityName}
                for fieldName in fields:
                    try:
                        fieldValue = fieldData.getElementValue(fieldName)
                        row[fieldName] = fieldValue
                    except blpapi.NotFoundException:
                        row[fieldName] = None
                newdata.append(row)
    return pd.DataFrame.from_dict(newdata)

data = process_msgs(data, fields_update)
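# The security identifier comes back as "<cusip> Corp"; keep the 9-character CUSIP
# and map it back to id_bb_unique via the cusips frame.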
data.security = data.security.str.slice(0, 9)
data['id_bb_unique'] = cusips.loc[data.security]['id_bb_unique'].reset_index(drop=True)
with conn.cursor() as c:
    for i in range(data.shape[0]):
        c.execute("UPDATE bloomberg_corp_ref set defaulted = %s, default_date = %s, " \
                  "called=%s, called_date = %s, status=%s " \
                  "where id_bb_unique=%s",
                  (data.iloc[i]['DEFAULTED'], data.iloc[i]['DEFAULT_DATE'], data.iloc[i]['CALLED'],
                   data.iloc[i]['CALLED_DT'], data.iloc[i]['LN_ISSUE_STATUS'], data.iloc[i]['id_bb_unique']))
conn.commit()

# MultiIndex of (id_bb_unique, pricingdate) pairs already stored in bloomberg_corp,
# used below to avoid inserting duplicate rows
currentdata = pd.read_sql_query("SELECT id_bb_unique, pricingdate from bloomberg_corp",
                                engine,
                                parse_dates=["pricingdate"]).set_index(['id_bb_unique', 'pricingdate']).index

#no need to insert empty prices
data.dropna(subset=['PX_LAST'], inplace=True)
data.set_index(['id_bb_unique', 'LAST_UPDATE_DT'], inplace=True)
data = data.loc[data.index.difference(currentdata)]
data.reset_index(inplace=True)

sqlstr = "INSERT INTO bloomberg_corp VALUES(%s, %s, %s, %s, %s, %s)"
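# Values are passed positionally, assuming bloomberg_corp's column order is
# (id_bb_unique, pricingdate, px_last, margin, amt_outstanding, pricing_source).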
with conn.cursor() as c:
    for i in range(data.shape[0]):
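        # Convert NaN/None to None so missing numerics are stored as SQL NULL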
        margin = data.iloc[i]['LN_CURRENT_MARGIN']
        if pd.isnull(margin):
            margin = None
        amt_outstanding = data.iloc[i]['AMT_OUTSTANDING']
        if pd.isnull(amt_outstanding):
            amt_outstanding = None
        c.execute(sqlstr, (data.iloc[i]['id_bb_unique'], data.iloc[i]['LAST_UPDATE_DT'],
                           data.iloc[i]['PX_LAST'], margin, amt_outstanding,
                           data.iloc[i]['PRICING_SOURCE']))
conn.commit()