import logging
import pandas as pd
import socket
import sys

from db import dawn_engine
try:
    # env looks these paths up in the process environment, hence the KeyError guard
    from env import DAILY_DIR, LOG_DIR
except KeyError:
    sys.exit("Please set 'DAILY_DIR' and 'LOG_DIR' in the environment")
from exchange import ExchangeMessage
from exchangelib import HTMLBody
from pathlib import Path
from time import sleep
from pandas.tseries.offsets import BDay
from paramiko import Transport, SFTPClient, RSAKey
from parse_citi_pdf import get_citi_collateral
from sqlalchemy.exc import IntegrityError
from ssh2.session import Session
from ssh2.utils import wait_socket
from ssh2.error_codes import LIBSSH2_ERROR_EAGAIN
from ssh2.sftp import (LIBSSH2_FXF_READ, LIBSSH2_SFTP_S_IRUSR,
                       LIBSSH2_SFTP_S_IFDIR, LIBSSH2_SFTP_S_IFREG)

logging.basicConfig(filename=LOG_DIR / 'collateral_calc.log',
                    level=logging.WARNING,
                    format='%(asctime)s %(message)s')


def get_sftp_client():
    transport = Transport(('prmssp.amer.sgcib.com', 22))
    transport.connect(username='SerenitasGamma@USA', password='SSqrrLL99')
    return SFTPClient.from_transport(transport)


def get_baml_sftp_client():
    transport = Transport(('ftps.b2b.ml.com', 22))
    pkey = RSAKey.from_private_key_file("/home/guillaume/.ssh/id_rsa")
    transport.connect(username='lmcginvs', pkey=pkey)
    return SFTPClient.from_transport(transport)


def get_wells_sftp_client():
    transport = Transport(('axst.wellsfargo.com', 10022))
    transport.connect(username='LMCHsWC6EP', password='HI2s2h19+')
    return SFTPClient.from_transport(transport)


def get_wells_sftp_client2():
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(('axst.wellsfargo.com', 10022))
    session = Session()
    session.handshake(sock)
    session.userauth_password('LMCHsWC6EP', '0lvK+7xL')
    sftp = session.sftp_init()
    return sftp


def download_wells_files2(d=None):
    DATA_DIR = DAILY_DIR / "Wells_reports"
    sftp = get_wells_sftp_client2()
    files = []
    with sftp.opendir("/RECEIVE/339425_DATO2") as fh:
        for size, buf, attrs in fh.readdir():
            if attrs.permissions & LIBSSH2_SFTP_S_IFREG:
                files.append(buf.decode())
    for f in files:
        local_file = DATA_DIR / f
        if not local_file.exists():
            with sftp.open(f"/RECEIVE/339425_DATO2/{f}",
                           LIBSSH2_FXF_READ, LIBSSH2_SFTP_S_IRUSR) as remote_handle, \
                    local_file.open("wb") as local_handle:
                for size, data in remote_handle:
                    local_handle.write(data)


def download_baml_files(d=None):
    DATA_DIR = DAILY_DIR / "BAML_reports"
    sftp = get_baml_sftp_client()
    for f in sftp.listdir('outgoing'):
        local_file = DATA_DIR / f
        if not local_file.exists():
            sftp.get(f"outgoing/{f}", localpath=DATA_DIR / f)


def download_wells_files(d=None):
    DATA_DIR = DAILY_DIR / "Wells_reports"
    sftp = get_wells_sftp_client()
    base_dir = "/RECEIVE/339425_DATO2"
    for f in sftp.listdir(base_dir):
        if not (DATA_DIR / f).exists():
            sftp.get(f"{base_dir}/{f}", localpath=DATA_DIR / f)


def download_sftp_files(d=None,
                        report_types=["OTC_CASH_ACTIVITY", "OTC_POSITIONS",
                                      "OTC_MARGIN", "OTC_MARGIN_EX_DEF",
                                      "OTC_STATEMENT"],
                        retry_count=0):
    """Download the SG OTC reports for date d, retrying every 500s (giving up after 20 retries)."""
    if retry_count > 20:
        return
    DATA_DIR = DAILY_DIR / "SG_reports"
    sftp = get_sftp_client()
    if d is None:
        # no date given: sweep everything currently sitting in the OTC folder
        for f in sftp.listdir('OTC'):
            if f.endswith("OTC_STATEMENT.xls"):
                print(f)
                sftp.get(f"OTC/{f}", localpath=DATA_DIR / f)
            else:
                for report_type in report_types[:-1]:
                    if f.endswith(f"{report_type}.csv"):
                        print(f)
                        sftp.get(f"OTC/{f}", localpath=DATA_DIR / f)
                    else:
                        continue
    else:
        file_list = sftp.listdir('OTC')
        for report_type in report_types:
            if report_type == "OTC_STATEMENT":
                f = f"{d:%Y%m%d}_{report_type}.xls"
            else:
                f = f"{d:%Y%m%d}_{report_type}.csv"
            if f not in file_list:
                logging.info("File not here yet, trying again in 500s...")
                logging.info(f"Try count: {retry_count}")
                sleep(500)
                sftp.close()
                download_sftp_files(d, report_types, retry_count + 1)
                # the retry call handled the remaining reports and this
                # handle is already closed, so stop here
                return
            else:
                sftp.get(f"OTC/{f}", localpath=DATA_DIR / f)
    sftp.close()
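
# Illustrative driver for the SFTP helpers above (not part of the original file;
# the previous-business-day convention is an assumption based on how the
# *_collateral functions below are dated):
#
#     d = pd.Timestamp.today().normalize() - BDay()
#     download_sftp_files(d)      # SG reports; sleeps and retries until they appear
#     download_baml_files()
#     download_wells_files()
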

def download_ms_emails_from_gmail():
    DATA_DIR = DAILY_DIR / "MS_reports"
    from download_emails import GmailMessage
    gm = GmailMessage()
    for msg in gm.list_msg_ids('Globeop/Operations'):
        try:
            message = gm.from_id(msg['id'])
            subject = message['subject']
            if 'SERCX **Daily' in subject:
                for attach in message.iter_attachments():
                    fname = attach.get_filename()
                    if 'NETSwaps' in fname:
                        fname = "Trade_Detail_" + fname.split("_")[1]
                    elif 'NET_Collateral' in fname:
                        fname = "Collateral_Detail_" + fname.rsplit("_", 1)[1]
                    else:
                        continue
                    p = DATA_DIR / fname
                    if p.exists():
                        continue
                    else:
                        # decode the attachment from its transfer encoding
                        p.write_bytes(attach.get_payload(decode=True))
        except (KeyError, UnicodeDecodeError):
            logging.error("error decoding " + msg['id'])
            continue


def download_ms_emails(count=20):
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin calls MS"], count=count,
                         subject__contains="SERCX **Daily")
    DATA_DIR = DAILY_DIR / "MS_reports"
    for msg in emails:
        for attach in msg.attachments:
            if 'NETSwaps' in attach.name:
                fname = "Trade_Detail_" + attach.name.split("_")[1]
            elif 'NET_Collateral' in attach.name:
                fname = "Collateral_Detail_" + attach.name.rsplit("_", 1)[1]
            else:
                continue
            p = DATA_DIR / fname
            if not p.exists():
                p.write_bytes(attach.content)


def download_gs_emails(count=20):
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin calls"], count=count,
                         subject__contains="Margin")
    DATA_DIR = DAILY_DIR / "GS_reports"
    for msg in emails:
        for attach in msg.attachments:
            fname = attach.name
            if fname.endswith('xls'):
                p = DATA_DIR / fname
                if not p.exists():
                    p.write_bytes(attach.content)


def download_citi_emails(count=20):
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin Calls Citi"], count=count,
                         subject__startswith="262966")
    DATA_DIR = DAILY_DIR / "CITI_reports"
    for msg in emails:
        for attach in msg.attachments:
            fname = attach.name
            p = DATA_DIR / fname
            if not p.exists():
                p.write_bytes(attach.content)
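
# The FCM valuations below work in "dirty upfront" terms. Worked example with
# illustrative numbers (not taken from any report): MTM = 25_000, accrued
# coupon = 5_000, notional = 10_000_000 gives
#     dirty upfront = (25_000 + 5_000) / 10_000_000 = 0.003
# The sign is kept for 'Buy' rows and flipped otherwise, and each position's
# collateral amount is then its own notional * dirty upfront.
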

def baml_collateral(d):
    """Per-strategy collateral value of the CDS positions held at BAML for date d.

    Also records the day's initial margin requirement in fcm_im.
    """
    df = pd.read_csv(DAILY_DIR / "BAML_reports" /
                     f"OTC_Open_Positions_-_Credit_-_LMCG_{d:%Y%m%d}.CSV",
                     usecols=['MTM', 'ACCRUEDCPN', 'VARMARGIN', 'REDCODE', 'NOTIONAL',
                              'EODSETTLEMENTPRICE', 'PERIOD', 'BUYSELL'],
                     index_col=['REDCODE', 'PERIOD'])
    df = df[df.EODSETTLEMENTPRICE.notnull()]
    positions = pd.read_sql_query(
        "SELECT security_id, security_desc, folder, notional, currency "
        "FROM list_cds_positions_by_strat_fcm(%s, 'BAML')",
        dawn_engine, params=(d.date(),), index_col=['security_id'])
    df_helper = pd.read_sql_query(
        "SELECT redindexcode, upper(tenor::text) AS tenor, "
        "to_char(maturity, 'YYYYMM')::integer AS PERIOD FROM index_desc",
        dawn_engine, index_col=['redindexcode', 'tenor'])
    positions['tenor'] = (positions.security_desc + "R").str.split(" ", expand=True)[4]
    positions = positions.set_index('tenor', append=True)
    positions['PERIOD'] = df_helper.loc[positions.index]
    positions = positions.reset_index(['tenor']).set_index(['PERIOD'], append=True)
    df['DIRTYUPFRONT'] = (df.MTM + df.ACCRUEDCPN) / df.NOTIONAL
    df['DIRTYUPFRONT'] = df.DIRTYUPFRONT.where(df.BUYSELL == 'Buy', -df.DIRTYUPFRONT)
    df = df.groupby(level=[0, 1]).first()
    positions['dirtyupfront'] = df.loc[positions.index, 'DIRTYUPFRONT']
    positions['amount'] = positions['notional'] * positions['dirtyupfront']
    positions.folder = positions.folder.map({'HEDGE_MBS': 'MBSCDSCSH',
                                             'SER_ITRXCURVE': 'SER_ITRXCVCSH',
                                             'SER_IGCURVE': 'SER_IGCVECSH',
                                             'HYOPTDEL': 'COCSH',
                                             'IGOPTDEL': 'COCSH',
                                             'IGINX': 'TCSH'})
    df = (positions.
          groupby('folder').
          agg({'amount': 'sum', 'currency': 'first'}).
          reset_index('folder'))
    df.columns = ['Strategy', 'Amount', 'Currency']
    df_margin = pd.read_csv(DAILY_DIR / "BAML_reports" / f"OTC_Moneyline_{d:%Y%m%d}.CSV",
                            usecols=['Statement Date', 'AT CCY',
                                     'Initial Margin Requirement'],
                            parse_dates=['Statement Date'])
    df_margin.columns = ['date', 'currency', 'amount']
    df_margin['account'] = 'V0NSCLMFCM'
    try:
        dawn_engine.execute("INSERT INTO fcm_im "
                            "VALUES(%(date)s, %(account)s, %(currency)s, %(amount)s)",
                            df_margin.iloc[-1].to_dict())
    except IntegrityError:
        pass
    return df


def wells_collateral(d):
    """Per-strategy collateral value of the CDS positions held at Wells Fargo for date d.

    Also records the day's initial margin requirement in fcm_im.
    """
    account = "A5882186"
    df = pd.read_csv(DAILY_DIR / "Wells_reports" /
                     f"OTC_CDS_Position_Activity_{account}_{d:%m%d%Y}.csv",
                     usecols=["TENOR", "MARKET_VALUE_NPV", "STATEMENT_CLOSING_PRICE",
                              "PAIR_CLIP", "BUY_SELL", "NOTIONAL", "MATURITY_DATE"],
                     parse_dates=["MATURITY_DATE"],
                     index_col=["PAIR_CLIP", "MATURITY_DATE"])
    positions = pd.read_sql_query(
        "SELECT security_id, security_desc, maturity, folder, notional, currency "
        "FROM list_cds_positions_by_strat_fcm(%s, 'WF')",
        dawn_engine, params=(d.date(),), index_col=["security_id", "maturity"])
    df["DIRTYUPFRONT"] = df.MARKET_VALUE_NPV / df.NOTIONAL
    df["DIRTYUPFRONT"] = df.DIRTYUPFRONT.where(df.BUY_SELL == 1, -df.DIRTYUPFRONT)
    df.index.names = ["security_id", "maturity"]
    positions = positions.join(df, how="left")
    positions["amount"] = positions["notional"] * positions["DIRTYUPFRONT"]
    positions.folder = positions.folder.map({'HEDGE_MBS': 'MBSCDSCSH',
                                             'SER_ITRXCURVE': 'SER_ITRXCVCSH',
                                             'SER_IGCURVE': 'SER_IGCVECSH',
                                             'HYOPTDEL': 'COCSH',
                                             'IGOPTDEL': 'COCSH',
                                             'IGINX': 'TCSH',
                                             'HYINX': 'TCSH'})
    df = (positions.
          groupby('folder').
          agg({"amount": "sum", "currency": "first"}).
          reset_index("folder"))
    df.columns = ["Strategy", "Amount", "Currency"]
    df_margin = pd.read_csv(DAILY_DIR / "Wells_reports" /
                            f"OTC_Moneyline_Activity_{account}_{d:%m%d%Y}.csv",
                            usecols=["CURRENCY_NAME", "CURRENT_IM", "VALUE_DATE"],
                            parse_dates=["VALUE_DATE"], index_col=["CURRENCY_NAME"])
    try:
        dawn_engine.execute("INSERT INTO fcm_im VALUES(%s, 'WFNSCLMFCM', 'USD', %s)",
                            df_margin.loc["ZZZZZ", ["VALUE_DATE", "CURRENT_IM"]].tolist())
    except IntegrityError:
        pass
    return df
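
# sg_collateral below spreads each (index_type, series, tenor) collateral bucket
# across the strategy folders holding that index, weighted by notional. A small
# illustration with made-up numbers: a bucket of 90 shared by folder A
# (notional 2,000,000) and folder B (notional 1,000,000) is split as
#     A: 90 * 2/3 = 60      B: 90 * 1/3 = 30
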

def sg_collateral(d):
    """Per-strategy collateral at SG: position MtM net of variation-margin cash,
    allocated to strategy folders pro rata by notional. Also records the SG IMR
    in fcm_im.
    """
    df_activity = pd.read_csv(DAILY_DIR / "SG_reports" /
                              f"{d:%Y%m%d}_OTC_CASH_ACTIVITY.csv",
                              usecols=["Ticket Reference", "Record Type",
                                       "Currency", "Amount"])
    df_position = pd.read_csv(DAILY_DIR / "SG_reports" /
                              f"{d:%Y%m%d}_OTC_POSITIONS.csv",
                              usecols=["Ticket Reference", "Reference Entity",
                                       "Mtm Value"])
    df_activity = (df_activity.loc[df_activity['Record Type'] == "VM"].
                   set_index("Ticket Reference"))
    df_margin = pd.read_csv(DAILY_DIR / "SG_reports" /
                            f"{d:%Y%m%d}_OTC_MARGIN_EX_DEF.csv",
                            usecols=["Currency", "SG IMR"])
    df_position = df_position.set_index("Ticket Reference")
    # expired_trades
    # df_position = df_position.append(
    #     pd.DataFrame({"Reference Entity": 'CDX-NAIGS29V1-5Y', "Mtm Value": 0.},
    #                  index=['T2201711010000A3K20000045561220U']))
    df = df_activity.join(df_position)
    # expired trade (need to figure out how to get them from the report)
    # df.loc['N201811090000A3K215946925849228U1', 'Mtm Value'] = 0.
    # df.loc['N201811090000A3K215946925849228U1', 'Reference Entity'] = 'CDX-NAIGS31V1-5Y'
    df['Collateral'] = df['Mtm Value'] - df['Amount']
    # split e.g. 'CDX-NAIGS31V1-5Y' into index type, series, version and tenor
    ref_entity = df['Reference Entity'].str.split("-", expand=True)
    del ref_entity[0]
    ref_entity.columns = ['to_split', 'tenor']
    ref_entity = ref_entity.join(ref_entity['to_split'].str.
                                 extract(r"(IG|HY|EUROPE)S(\d+)V(\d+)$", expand=True))
    del ref_entity['to_split']
    ref_entity.columns = ['tenor', 'index_type', 'series', 'version']
    ref_entity.loc[ref_entity.index_type == "EUROPE", 'index_type'] = "EU"
    df = df.join(ref_entity)
    df = df.groupby(['index_type', 'series', 'tenor'])['Collateral'].sum()
    positions = pd.read_sql_query(
        "SELECT security_desc, folder, notional, currency "
        "FROM list_cds_positions_by_strat(%s)",
        dawn_engine, params=(d.date(),))
    instruments = positions.security_desc.str.split(expand=True)[[1, 3, 4]]
    instruments.columns = ['index_type', 'series', 'tenor']
    instruments.series = instruments.series.str.extract(r"S(\d+)")
    instruments.loc[instruments.index_type == "EUR", 'index_type'] = "EU"
    positions = positions.join(instruments)
    del positions['security_desc']
    positions = positions.set_index(['index_type', 'series', 'tenor'])
    df = positions.join(df)

    def f(g):
        # allocate each index bucket's collateral pro rata by notional
        g.Collateral = g.Collateral * g.notional / g.notional.sum()
        return g

    df = (df.groupby(level=['index_type', 'series', 'tenor']).
          apply(f))
    df = df.groupby(['folder']).agg({'Collateral': 'sum', 'currency': 'first'})
    df = df.reset_index('folder')
    df = df.rename(columns={'folder': 'Strategy', 'currency': 'Currency',
                            'Collateral': 'Amount'})
    df.Strategy = df.Strategy.map({'HEDGE_MBS': 'MBSCDSCSH',
                                   'SER_ITRXCURVE': 'SER_ITRXCVCSH',
                                   'SER_IGCURVE': 'SER_IGCVECSH',
                                   'HYOPTDEL': 'HYCDSCSH',
                                   'IGOPTDEL': 'IGCDSCSH'})
    df_margin['account'] = 'SGNSCLMASW'
    df_margin = df_margin.rename(columns={'SG IMR': 'amount', 'Currency': 'currency'})
    df_margin['date'] = d
    try:
        df_margin.to_sql("fcm_im", dawn_engine, if_exists='append', index=False)
    except IntegrityError:
        pass
    return df


def ms_collateral(d, dawn_trades):
    """Per-strategy margin requirement from the Morgan Stanley reports, plus an
    M_CSH_CASH balancing row against the collateral value held.
    """
    df = pd.read_excel(DAILY_DIR / "MS_reports" / f"Collateral_Detail_{d:%Y%m%d}.xls")
    collat = df.loc[1, 'coll_val_ccy'].replace(",", "")
    if "(" in collat:
        # amounts in parentheses are negative, e.g. "(1,234.56)" -> -1234.56
        collat = collat[1:-1]
        collat = -float(collat)
    else:
        collat = float(collat)
    df = pd.read_excel(DAILY_DIR / "MS_reports" / f"Trade_Detail_{d:%Y%m%d}.xls")
    df = df.merge(dawn_trades, how='left', left_on='trade_id', right_on='cpty_id')
    df = df.groupby('folder')[["collat_req_in_agr_ccy"]].sum()
    df['Currency'] = "USD"
    df = df.reset_index()
    col_names = ['Strategy', 'Amount', 'Currency']
    df.columns = col_names
    return pd.concat([df,
                      pd.DataFrame.from_records(
                          [('M_CSH_CASH', -collat - df.Amount.sum(), "USD")],
                          columns=col_names)])


def load_gs_file(d, pattern):
    try:
        fname = next((DAILY_DIR / "GS_reports").
                     glob(f"{pattern}*{d.strftime('%d_%b_%Y')}*"))
    except StopIteration:
        raise FileNotFoundError(f"GS {pattern} file not found for date {d}")
    return pd.read_excel(fname, skiprows=9, skipfooter=77)
glob(f"262966_Portfolio_{d.strftime('%Y%m%d')}*")) except StopIteration: raise FileNotFoundError(f"CITI file not found for date {d}") return pd.read_excel(fname, skiprows=6, skipfooter=2) def get_dawn_trades(): df_cds = pd.read_sql_query("SELECT cpty_id, folder FROM cds " "WHERE cpty_id IS NOT NULL", dawn_engine) df_swaptions = pd.read_sql_query("SELECT cpty_id, folder FROM swaptions " "WHERE cpty_id IS NOT NULL", dawn_engine) df_caps = pd.read_sql_query("SELECT cpty_id, folder FROM capfloors " "WHERE cpty_id IS NOT NULL", dawn_engine) df = pd.concat([df_cds, df_swaptions, df_caps]) df = df.replace({"folder": {'IGREC': 'COCSH', 'IGPAYER': 'COCSH', 'HYPAYER': 'COCSH', 'HYREC': 'COCSH', 'STEEP': 'IRDEVCSH', 'FLAT': 'IRDEVCSH', 'MBSCDS': 'MBSCDSCSH', 'IGMEZ': 'TCSH', 'IGSNR': 'TCSH', 'IGEQY': 'TCSH', 'HYMEZ': 'TCSH', 'BSPK': 'TCSH'}}) return df def gs_collateral(d, dawn_trades): df = load_gs_file(d, "Collateral_Detail") collateral = float(df.Quantity) df = load_gs_file(d, "Trade_Detail") df = df[['Trade Id', 'Transaction Type', 'NPV (USD)', 'Initial Margin Required']] df = df.merge(dawn_trades, how='left', left_on='Trade Id', right_on='cpty_id') df = df[['folder', 'NPV (USD)', 'Initial Margin Required']] df = df.groupby('folder').sum() df = df.sum(axis=1).to_frame(name='Amount') df['Currency'] = 'USD' df = df.reset_index() df.columns = ['Strategy', 'Amount', 'Currency'] df.Amount *= -1 df = df.append({'Strategy': "M_CSH_CASH", 'Amount': -collateral - df.Amount.sum(), 'Currency': "USD"}, ignore_index=True) return df def citi_collateral(d, dawn_trades): df = load_citi_file(d) collateral = get_citi_collateral(d - BDay()) df = df[['Trade ID', 'Market Value', 'BasicAmt']] df = df.merge(dawn_trades, how='left', left_on='Trade ID', right_on='cpty_id') df = df.groupby('folder').sum() df = df.sum(axis=1).to_frame(name='Amount') df['Currency'] = 'USD' df = df.reset_index() df.columns = ['Strategy', 'Amount', 'Currency'] df.Amount *= -1 df = df.append({"Strategy": "M_CSH_CASH", "Amount": collateral - df.Amount.sum(), "Currency": "USD"}, ignore_index=True) return df def send_email(d, dfs): pd.set_option('display.float_format', '{:.2f}'.format) content = HTMLBody('

def send_email(d, dfs):
    pd.set_option('display.float_format', '{:.2f}'.format)
    content = HTMLBody('
    ' '