Diffstat (limited to 'python/load_loanprices_data.py')
 python/load_loanprices_data.py | 106
 1 file changed, 83 insertions(+), 23 deletions(-)
diff --git a/python/load_loanprices_data.py b/python/load_loanprices_data.py
index 8f1d42bb..28bf2dc3 100644
--- a/python/load_loanprices_data.py
+++ b/python/load_loanprices_data.py
@@ -1,5 +1,10 @@
import blpapi
import sys
+from sqlalchemy import create_engine, MetaData, Table
+import pandas as pd
+
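+# SQLAlchemy engine/metadata for the ET Postgres database, used below for the
+# pandas reads/writes and the bloomberg_corp_ref table insert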
+engine = create_engine('postgresql://et_user:Serenitas1@debian/ET')
+metadata = MetaData(bind=engine)
# Fill SessionOptions
sessionOptions = blpapi.SessionOptions()
@@ -18,15 +23,28 @@ if not session.openService("//blp/refdata"):
refDataService = session.getService("//blp/refdata")
request = refDataService.createRequest("ReferenceDataRequest")
-fields = ["PX_LAST","LAST_UPDATE_DT","ISSUER","MATURITY","CPN","CPN_TYP",
- "CPN_FREQ","FLT_SPREAD","LIBOR_FLOOR","LN_CURRENT_MARGIN",
+fields = ["ID_BB_UNIQUE", "ISSUE_DT",
+ "SECURITY_TYP", "AMT_OUTSTANDING", "PX_LAST","LAST_UPDATE_DT",
+ "ISSUER","MATURITY","CPN","CPN_TYP", "CPN_FREQ","FLT_SPREAD",
+ "LIBOR_FLOOR","LN_CURRENT_MARGIN", "LN_TRANCHE_SIZE",
"LN_COVENANT_LITE","SECOND_LIEN_INDICATOR","DEFAULTED", "PRICING_SOURCE"]
# append securities to request
-cusips=set([])
-with open("/home/share/CorpCDOs/data/bbgcusips.txt") as fh:
-    cusips = [line.rstrip() for line in fh]
-cusips = set(cusips)
+#cusips = pd.read_sql_query("select distinct cusip from bloomberg_corp_ref where tranche_size is Null", engine)
+
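+# each line of bbg_loanxid.csv is "cusip,loanxid"; an empty loanxid is stored as None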
+def split_clean(line):
+    a, b = line.rstrip().split(",")
+    if b == '':
+        b = None
+    return (a, b)
+
+with open("/home/share/CorpCDOs/data/bbg_loanxid.csv") as fh:
+    mapping = dict([split_clean(line) for line in fh])
+cusips, loanxids = zip(*mapping.iteritems())
+
+# cusips = set(cusips)
for cusip in cusips:
request.append("securities", "{0} Corp".format(cusip))
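The request/event loop that fills `data` with the response messages is unchanged by this commit and is therefore not shown between these hunks. For orientation, the standard blpapi pattern for a ReferenceDataRequest looks roughly like the sketch below; variable names follow the script, but this is an illustration, not the file's actual code.

    for f in fields:
        request.append("fields", f)
    session.sendRequest(request)
    data = []
    try:
        while True:
            # wait up to 500 ms for the next event and keep its messages
            event = session.nextEvent(500)
            for msg in event:
                data.append(msg)
            # a RESPONSE event marks the end of the reply
            if event.eventType() == blpapi.Event.RESPONSE:
                break
    finally:
        # Stop the session
        session.stop()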
@@ -52,20 +70,62 @@ finally:
# Stop the session
session.stop()
-i=0
-data2=[]
-for msg in data:
-    if msg.messageType() == blpapi.Name("ReferenceDataResponse"):
-        securityDataArray = msg.getElement("securityData")
-        for securityData in securityDataArray.values():
-            i+=1
-            securityName = securityData.getElementValue("security")
-            fieldData = securityData.getElement("fieldData")
-            row = {}
-            for fieldName in fields:
-                try:
-                    fieldValue = fieldData.getElementValue(fieldName)
-                    row[fieldName] = fieldValue
-                except blpapi.NotFoundException:
-                    row[fieldName] = None
-            data2.append(row)
+def process_msgs(data, fields):
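+    """Flatten Bloomberg ReferenceDataResponse messages into a DataFrame.
+
+    Returns one row per security in the response; fields Bloomberg did not
+    return are set to None.
+    """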
+    newdata = []
+    for msg in data:
+        if msg.messageType() == blpapi.Name("ReferenceDataResponse"):
+            securityDataArray = msg.getElement("securityData")
+            for securityData in securityDataArray.values():
+                securityName = securityData.getElementValue("security")
+                fieldData = securityData.getElement("fieldData")
+                row = {'security': securityName}
+                for fieldName in fields:
+                    try:
+                        fieldValue = fieldData.getElementValue(fieldName)
+                        row[fieldName] = fieldValue
+                    except blpapi.NotFoundException:
+                        row[fieldName] = None
+                newdata.append(row)
+    return pd.DataFrame.from_dict(newdata)
+
+data = process_msgs(data, fields)
+data = data[~data.ISSUER.isnull()]
+data.security = data.security.str.slice(0,9)
+data.rename(columns={'PX_LAST': 'Price',
+                     'CPN': 'Coupon',
+                     'CPN_TYP': 'CouponType',
+                     'CPN_FREQ': 'Frequency',
+                     'FLT_SPREAD': 'Spread',
+                     'LN_CURRENT_MARGIN': 'loan_margin',
+                     'LN_COVENANT_LITE': 'covlite',
+                     'SECOND_LIEN_INDICATOR': 'secondlien',
+                     'PRICING_SOURCE': 'Source',
+                     'AMT_OUTSTANDING': 'amount_outstanding',
+                     'SECURITY_TYP': 'security_type',
+                     'LAST_UPDATE_DT': 'PricingDate',
+                     'security': 'Cusip',
+                     'LN_TRANCHE_SIZE': 'tranche_size'}, inplace=True)
+data.rename(columns=lambda x: x.lower(), inplace=True)
+data = data[~data.pricingdate.isnull()]
+data.set_index(['cusip', 'pricingdate'], inplace=True)
+
+engine = create_engine('postgresql://et_user:Serenitas1@debian/ET')
+currentdata = pd.Index(pd.read_sql_query("select id_bb_unique from bloomberg_corp_ref", engine).id_bb_unique)
+data = data.ix[data.index.difference(currentdata)]
+data = data.reset_index()
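+# append the new price observations to bloomberg_corp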
+data[['id_bb_unique', 'pricingdate', 'price', 'loan_margin', 'amount_outstanding',
+      'defaulted', 'source']].to_sql("bloomberg_corp", engine, if_exists='append', index=False)
+data[['loanxid','cusip']] = data[['loanxid','cusip']].applymap(lambda x: None if not x else [x])
+records = data[['id_bb_unique', 'cusip', 'issuer', 'maturity', 'coupon', 'coupontype',
+                'frequency', 'spread', 'libor_floor', 'tranche_size', 'covlite',
+                'secondlien', 'security_type', 'issue_dt', 'loanxid']].to_dict(orient='records')
+
+bloomberg_corp_ref = Table('bloomberg_corp_ref', metadata, autoload=True)
+ins = bloomberg_corp_ref.insert()
+engine.execute(ins, records)
+
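+# backfill tranche_size on existing bloomberg_corp_ref rows
+# (a single executemany over (tranche_size, id_bb_unique) pairs)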
+with engine.begin() as conn:
+    conn.execute("update bloomberg_corp_ref set tranche_size=%s where id_bb_unique=%s",
+                 [(a, b) for a, b in data[['tranche_size', 'id_bb_unique']].to_records(index=False)])
+
+for id_bb in data.id_bb_unique:
+    engine.execute("delete from bloomberg_corp_ref where id_bb_unique=%s", (id_bb,))
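
None of the hunks above add a `loanxid` column to `data`, yet the bloomberg_corp_ref insert selects one; presumably the cusip-to-LoanX mapping read from bbg_loanxid.csv is meant to supply it. A minimal, hypothetical way to attach it just before the applymap and the reference-table insert, assuming `mapping` is still the {cusip: loanxid} dict built near the top of the script, would be:

    # assumption, not part of this commit: join the csv mapping onto the result frame
    data['loanxid'] = data['cusip'].map(mapping)

Series.map leaves unmatched cusips as missing values, so only securities that appear in the csv would carry a LoanX identifier into the insert.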