"""Pull loan reference/pricing fields from Bloomberg (//blp/refdata) for the
cusips listed in bbg_loanxid.csv, then load the results into the
``bloomberg_corp`` (prices) and ``bloomberg_corp_ref`` (reference) tables.
"""
import blpapi
import sys
from sqlalchemy import create_engine, MetaData, Table
import pandas as pd

# NOTE(review): credentials are hard-coded in the DSN -- move to a config
# file or environment variable.
engine = create_engine('postgresql://et_user:Serenitas1@debian/ET')
metadata = MetaData(bind=engine)

# Fill SessionOptions
sessionOptions = blpapi.SessionOptions()
sessionOptions.setServerHost('192.168.1.108')
sessionOptions.setServerPort(8194)
session = blpapi.Session(sessionOptions)

# Start a Session
if not session.start():
    print("Failed to start session.")
    sys.exit(0)
if not session.openService("//blp/refdata"):
    print("Failed to open //blp/refdata")
    sys.exit(0)

refDataService = session.getService("//blp/refdata")
request = refDataService.createRequest("ReferenceDataRequest")
fields = ["ID_BB_UNIQUE", "ISSUE_DT", "SECURITY_TYP", "AMT_OUTSTANDING",
          "PX_LAST", "LAST_UPDATE_DT", "ISSUER", "MATURITY", "CPN",
          "CPN_TYP", "CPN_FREQ", "FLT_SPREAD", "LIBOR_FLOOR",
          "LN_CURRENT_MARGIN", "LN_TRANCHE_SIZE", "LN_COVENANT_LITE",
          "SECOND_LIEN_INDICATOR", "DEFAULTED", "PRICING_SOURCE"]


def split_clean(l):
    """Split one 'cusip,loanxid' CSV line into a (cusip, loanxid) pair.

    An empty loanxid field is normalized to None.
    """
    # BUG FIX: the original read the module-level name ``line`` (leaked by
    # the Python 2 list comprehension) instead of its own parameter ``l``.
    a, b = l.rstrip().split(",")
    if b == '':
        b = None
    return (a, b)


# cusip -> loanxid mapping from the hand-maintained CSV.
with open("/home/share/CorpCDOs/data/bbg_loanxid.csv") as fh:
    mapping = dict(split_clean(line) for line in fh)
cusips = list(mapping.keys())
loanxids = list(mapping.values())

# append securities to request
for cusip in cusips:
    request.append("securities", "{0} Corp".format(cusip))
# append fields to request
for field in fields:
    request.append("fields", field)

session.sendRequest(request)
data = []
try:
    # Process received events until the final RESPONSE arrives.
    while True:
        # We provide a timeout to give a chance to Ctrl+C handling:
        ev = session.nextEvent(500)
        if ev.eventType() in (blpapi.Event.PARTIAL_RESPONSE,
                              blpapi.Event.RESPONSE):
            for msg in ev:
                data.append(msg)
        # Response completely received, so we can exit.
        if ev.eventType() == blpapi.Event.RESPONSE:
            break
finally:
    # Stop the session
    session.stop()


def process_msgs(data, fields):
    """Flatten ReferenceDataResponse messages into a DataFrame.

    Each security becomes one row: a 'security' column plus one column per
    requested field (None when Bloomberg did not return the field).
    """
    newdata = []
    for msg in data:
        if msg.messageType() == blpapi.Name("ReferenceDataResponse"):
            securityDataArray = msg.getElement("securityData")
            for securityData in securityDataArray.values():
                securityName = securityData.getElementValue("security")
                fieldData = securityData.getElement("fieldData")
                row = {'security': securityName}
                for fieldName in fields:
                    try:
                        row[fieldName] = fieldData.getElementValue(fieldName)
                    except blpapi.NotFoundException:
                        row[fieldName] = None
                newdata.append(row)
    # BUG FIX: DataFrame.from_dict expects a mapping; newdata is a list of
    # per-row dicts, which the plain constructor handles.
    return pd.DataFrame(newdata)


data = process_msgs(data, fields)
data = data[~data.ISSUER.isnull()]
# Security comes back as "<cusip> Corp"; keep only the 9-char cusip.
data.security = data.security.str.slice(0, 9)
data.rename(columns={'PX_LAST': 'Price', 'CPN': 'Coupon',
                     'CPN_TYP': 'CouponType', 'CPN_FREQ': 'Frequency',
                     'FLT_SPREAD': 'Spread',
                     'LN_CURRENT_MARGIN': 'loan_margin',
                     'LN_COVENANT_LITE': 'covlite',
                     'SECOND_LIEN_INDICATOR': 'secondlien',
                     'PRICING_SOURCE': 'Source',
                     'AMT_OUTSTANDING': 'amount_outstanding',
                     'SECURITY_TYP': 'security_type',
                     'LAST_UPDATE_DT': 'PricingDate',
                     'security': 'Cusip',
                     'LN_TRANCHE_SIZE': 'tranche_size'}, inplace=True)
data.rename(columns=lambda x: x.lower(), inplace=True)
data = data[~data.pricingdate.isnull()]
data.set_index(['cusip', 'pricingdate'], inplace=True)

currentdata = pd.Index(pd.read_sql_query(
    "select id_bb_unique from bloomberg_corp_ref", engine,
    parse_dates="pricingdate"))
# NOTE(review): data is indexed by (cusip, pricingdate) while currentdata
# holds id_bb_unique values, so this difference() likely filters nothing --
# confirm the intended de-duplication key against the schema.
# BUG FIX: .ix is deprecated/removed; use .loc.
data = data.loc[data.index.difference(currentdata)]
data = data.reset_index()

# BUG FIX: 'loanxid' was selected below but never created; attach it from
# the CSV mapping (keys are the 9-char cusips).
data['loanxid'] = data['cusip'].map(mapping)

data[['id_bb_unique', 'pricingdate', 'price', 'loan_margin',
      'amount_outstanding', 'defaulted',
      'source']].to_sql("bloomberg_corp", engine, if_exists='append',
                        index=False)

# Wrap in a single-element list (presumably array-typed columns in
# Postgres -- TODO confirm against the bloomberg_corp_ref schema).
data[['loanxid', 'cusip']] = data[['loanxid', 'cusip']].applymap(
    lambda x: None if not x else [x])
records = data[['id_bb_unique', 'cusip', 'issuer', 'maturity', 'coupon',
                'coupontype', 'frequency', 'spread', 'libor_floor',
                'tranche_size', 'covlite', 'secondlien', 'security_type',
                'issue_dt', 'loanxid']].to_dict(orient='records')

bloomberg_corp_ref = Table('bloomberg_corp_ref', metadata, autoload=True)
ins = bloomberg_corp_ref.insert()
engine.execute(ins, records)

with engine.begin() as conn:
    conn.execute(
        "update bloomberg_corp_ref set tranche_size=%s where id_bb_unique=%s",
        [(a, b) for a, b in
         data[['tranche_size', 'id_bb_unique']].to_records(index=False)])

# NOTE(review): this deletes every bloomberg_corp_ref row that was just
# inserted above -- it looks like leftover debugging/cleanup code; confirm
# the intent before keeping it.
for id_bb in data.id_bb_unique:
    engine.execute("delete from bloomberg_corp_ref where id_bb_unique=%s",
                   (id_bb,))