"""Pull bond and CDS trades from the Redis trade queues, enrich them with
Bloomberg data, persist payments, securities and marks to the dawndb
Postgres database, then generate and upload the GlobeOp trade CSV files."""

import redis
import socket
import pandas as pd
import csv
import sys
if sys.version_info.major == 3:
    from io import StringIO
else:
    from cStringIO import StringIO
from pickle import loads
from ftplib import FTP
import task_server.config as config
import os
from sqlalchemy import create_engine
from bbg_helpers import init_bbg_session, retrieve_data, BBG_IP
import re
import psycopg2
import logging
import argparse


def decode_dict(d):
    """Decode bytes keys/values to str (for pickles loaded under Python 3)."""
    return {k.decode() if isinstance(k, bytes) else k:
            v.decode() if isinstance(v, bytes) else v
            for k, v in d.items()}


def get_redis_queue():
    """Connect to the Redis instance holding the trade queues."""
    hostname = socket.gethostname()
    if hostname == 'debian':
        return redis.Redis(unix_socket_path='/var/run/redis/redis.sock')
    else:
        return redis.Redis(host='debian')


def aux(v):
    """Collapse the update history of one trade id into a single row.

    Cancelled trades are dropped; a trade created and then amended within
    the same batch is reported as a single NEW row.
    """
    if v.action.iat[-1] == 'CANCEL':
        return None
    if v.action.iat[0] == 'NEW':
        v.action.iat[-1] = 'NEW'
    return v.iloc[-1]


def get_trades(q, name='bond_trades'):
    """Read all pickled trades from the Redis list and deduplicate them by id."""
    r = q.lrange(name, 0, -1)
    if sys.version_info.major == 3:
        df = pd.DataFrame([decode_dict(loads(e, encoding='bytes')) for e in r])
    else:
        df = pd.DataFrame([loads(e) for e in r])
    list_trades = []
    if not df.empty:
        for tradeid, v in df.sort_values('lastupdate').groupby('id'):
            trade = aux(v)
            if trade is not None:
                list_trades.append(trade)
    return list_trades


def build_line(obj, queue='bond_trades'):
    """Build one CSV row, in the column order of ``headers[queue]``."""
    if queue == 'bond_trades':
        line = ["MortgageDeal", obj.dealid, obj.action, "Serenitas", None, None,
                obj.folder, obj.custodian, obj.cashaccount, obj.cp_code, None,
                'Valid', str(obj.trade_date), str(obj.settle_date), None, None,
                obj.cusip, obj['isin'], None, None, None, obj['description'],
                "Buy" if obj.buysell else "Sell", None, obj.accrued, obj.price,
                None, None, 'SERCGMAST', 'MORTGAGE', None, None, None, None,
                obj.faceamount, None, None, 'S']
    elif queue == 'cds_trades':
        freq = {4: 'Quarterly', 12: 'Monthly'}
        line = ["CreditDefaultSwapDeal", obj.dealid, obj.action, "Serenitas",
                None, None, obj.folder, obj.custodian, obj.cashaccount,
                obj.cp_code, None, 'Valid', str(obj.trade_date), None, None,
                str(obj.effective_date), str(obj.maturity), obj.currency,
                obj.notional, obj.fixed_rate, obj.payment_rolldate,
                obj.day_count, freq[obj.frequency]]
        line += [None] * 5
        line += [obj.protection, obj.security_id, obj.security_desc]
        line += [None] * 9
    return line


def bbg_process(conn, session, trade):
    """Retrieve Bloomberg data for a trade, store its payments and, for a
    security we have not seen before, insert it into the securities table."""
    fields = ["MTG_FACTOR_SET_DT", "INT_ACC"]
    fields_mtge = ["MTG_FACE_AMT", "START_ACC_DT"]
    fields_corp = ["AMT_ISSUED", "PREV_CPN_DT"]
    with conn.cursor() as c:
        c.execute("SELECT identifier FROM securities WHERE identifier=%s",
                  (trade['identifier'],))
        if not c.fetchone():
            # Unknown security: also request the static data needed to insert it.
            fields += ["MATURITY", "CRNCY", "NAME", "FLOATER", "FLT_SPREAD",
                       "CPN", "CPN_FREQ", "FIRST_CPN_DT", "MTG_PAY_DELAY",
                       "DAY_CNT_DES"]
    bbg_id = (trade['cusip'] or trade['isin']) + ' Mtge'
    bbg_type = 'Mtge'
    data = retrieve_data(session, [bbg_id], fields + fields_mtge,
                         overrides={'SETTLE_DT': trade['settle_date']})
    if not data[bbg_id]:
        # Not a mortgage security: retry as a corporate bond.
        bbg_id = (trade['cusip'] or trade['isin']) + ' Corp'
        bbg_type = 'Corp'
        data = retrieve_data(session, [bbg_id], fields + fields_corp,
                             overrides={'SETTLE_DT': trade['settle_date']})
    bbg_data = data[bbg_id]
    if bbg_data.get('MTG_FACTOR_SET_DT', 0) == 0:
        # No factor available: treat as unfactored, with no accrued interest.
        bbg_data['MTG_FACTOR_SET_DT'] = 1
        bbg_data['INT_ACC'] = 0
    currentface = trade['faceamount'] * bbg_data['MTG_FACTOR_SET_DT']
    accrued_payment = bbg_data['INT_ACC'] * currentface / 100.
    principal_payment = currentface * trade['price'] / 100.
    with conn.cursor() as c:
        c.execute("UPDATE bonds SET principal_payment = %s, accrued_payment = %s "
                  "WHERE id = %s", (principal_payment, accrued_payment, trade['id']))
    conn.commit()
    if len(fields) > 2:
        # We don't have the data in the securities table yet.
        sql_fields = ['identifier', 'cusip', 'isin', 'description', 'face_amount',
                      'maturity', 'floater', 'spread', 'coupon', 'frequency',
                      'day_count', 'first_coupon_date', 'pay_delay', 'currency',
                      'bbg_type', 'asset_class', 'start_accrued_date']
        sqlstr = "INSERT INTO securities({0}) VALUES({1})".format(
            ",".join(sql_fields), ",".join(["%s"] * len(sql_fields)))
        isfloater = bbg_data['FLOATER'] == 'Y'
        pay_delay = bbg_data.get('MTG_PAY_DELAY', 0)
        day_count = bbg_data.get('DAY_CNT_DES')
        m = re.match(r"[^(\s]+", day_count)
        if m:
            day_count = m.group(0)
        if isinstance(pay_delay, str):
            pay_delay = int(pay_delay.split(' ')[0])
        with conn.cursor() as c:
            c.execute(sqlstr,
                      (trade['identifier'], trade['cusip'], trade['isin'],
                       bbg_data['NAME'],
                       bbg_data.get('MTG_FACE_AMT') or bbg_data.get('AMT_ISSUED'),
                       bbg_data.get('MATURITY'), isfloater,
                       bbg_data.get('FLT_SPREAD') if isfloater else None,
                       bbg_data.get('CPN') if not isfloater else None,
                       bbg_data.get('CPN_FREQ'), day_count,
                       bbg_data.get('FIRST_CPN_DT'), pay_delay,
                       bbg_data.get('CRNCY'), bbg_type, trade['asset_class'],
                       bbg_data.get('START_ACC_DT') or bbg_data.get('PREV_CPN_DT')))
        conn.commit()
    # Mark the position at the buy price.
    if trade.buysell:
        sqlstr = "INSERT INTO marks VALUES(%s, %s, %s)"
        try:
            with conn.cursor() as c:
                c.execute(sqlstr, (trade['trade_date'], trade['identifier'],
                                   trade['price']))
        except psycopg2.IntegrityError:
            logging.error('We already have a mark')
            conn.rollback()
        finally:
            conn.commit()


headers = {
    'bond_trades': ['Deal Type', 'Deal ID', 'Action', 'Client', 'Reserved',
                    'Reserved', 'Folder', 'Custodian', 'Cash Account',
                    'Counterparty', 'Comments', 'State', 'Trade Date',
                    'Settlement Date', 'Reserved', 'GlobeOp Security Identifier',
                    'CUSIP', 'ISIN', 'Reserved', 'Reserved', 'Reserved',
                    'Security Description', 'Transaction Indicator',
                    'SubTransaction Indicator', 'Accrued', 'Price', 'BlockId',
                    'BlockAmount', 'Fund', 'Portfolio', 'Reserved', 'Reserved',
                    'ClientReference', 'ClearingMode', 'FaceAmount', 'Pool Factor',
                    'FactorAsOfDate', 'Delivery'],
    'cds_trades': ['Deal Type', 'Deal ID', 'Action', 'Client', 'Reserved',
                   'Reserved', 'Folder', 'Custodian', 'Cash Account',
                   'Counterparty', 'Comments', 'State', 'Trade Date', 'Reserved',
                   'Reserved', 'EffectiveDate', 'MaturityDate', 'Currency',
                   'Notional', 'FixedRate', 'PaymentRollDateConvention',
                   'DayCount', 'PaymentFrequency', 'FirstCouponRate',
                   'FirstCouponDate', 'ResetLag', 'Liquidation', 'LiquidationDate',
                   'Protection', 'UnderlyingSecurityId',
                   'UnderlyingSecurityDescription', 'CreditSpreadCurve',
                   'CreditEvents', 'RecoveryRate', 'Settlement', 'InitialMargin',
                   'InitialMarginPercentage', 'InitialMarginCurrency',
                   'DiscountCurve', 'ClientReference', 'UpfrontFee',
                   'UpfrontFeePayDate', 'RegenerateCashFlow', 'UpfrontFeeComment',
                   'GiveUpBroker', 'SwapType', 'OnPrice', 'OffPrice',
                   'AttachmentPoint', 'ExhaustionPoint', 'Fees',
                   'Fee Payment Dates', 'Fee Comments', 'Credit Event Occurred',
                   'Calendar', 'Clearing Facility', 'Adjusted', 'CcpTradeRef',
                   'BlockId', 'BlockAmount', 'NettingId', 'AnnouncementDate',
                   'ExecTS', 'DefaultProbability', 'ClientMargin', 'Factor',
                   'ISDADefinition']}


def generate_csv(l, name='bond_trades'):
    """Render a list of trades as a GlobeOp CSV payload (bytes on Python 3)."""
    output = StringIO()
    csvwriter = csv.writer(output)
    csvwriter.writerow(headers[name])
    for trade in l:
        csvwriter.writerow(build_line(trade, name))
    if sys.version_info.major == 3:
        return output.getvalue().encode()
    else:
        return output.getvalue()


def upload_file(timestamp):
    """Upload the CSV written by ``write_buffer`` to the GlobeOp FTP inbox."""
    ftp = FTP('ftp.globeop.com')
    ftp.login('srntsftp', config.ftp_password)
    ftp.cwd('incoming')
    filename = 'Serenitas.ALL.{0:%Y%m%d.%H%M%S}.Mortgages.csv'.format(timestamp)
    cmd = 'STOR {0}'.format(filename)
    with open(os.path.join('/home/share/Daily', str(timestamp.date()), filename),
              'rb') as fh:
        ftp.storbinary(cmd, fh)


def write_buffer(buf):
    """Write the CSV payload under /home/share/Daily/<date>/ and return the timestamp."""
    timestamp = pd.Timestamp.now()
    filename = 'Serenitas.ALL.{0:%Y%m%d.%H%M%S}.Mortgages.csv'.format(timestamp)
    with open(os.path.join('/home/share/Daily', str(timestamp.date()), filename),
              'wb') as fh:
        fh.write(buf)
    return timestamp


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-n", "--no-upload", action="store_true",
                        help="do not upload to GlobeOp")
    args = parser.parse_args()
    engine = create_engine('postgresql://dawn_user@debian/dawndb')
    conn = engine.raw_connection()
    q = get_redis_queue()
    for name in ['bond_trades', 'cds_trades']:
        l = get_trades(q, name)
        if l:
            buf = generate_csv(l, name)
            with init_bbg_session(BBG_IP) as session:
                for trade in l:
                    bbg_process(conn, session, trade)
            timestamp = write_buffer(buf)
            if not args.no_upload:
                upload_file(timestamp)
            q.delete(name)