"""Download counterparty reports (SG, BAML, MS, GS, Citi) and compute
per-strategy collateral breakdowns, which are emailed to operations."""

import os
import logging
from pathlib import Path
from time import sleep

import pandas as pd
from pandas.tseries.offsets import BDay
from paramiko import Transport, SFTPClient, RSAKey
from sqlalchemy.exc import IntegrityError
from exchangelib import HTMLBody

from db import dbengine
from exchange import ExchangeMessage
from download_emails import GmailMessage

DAILY_DIR = Path(os.environ["DAILY_DIR"])

# NOTE(review): level=WARNING means every logging.info(...) call below is
# silently dropped; lower to INFO if the retry/skip messages are wanted.
logging.basicConfig(
    filename=os.path.join(os.getenv("LOG_DIR"), 'collateral_calc.log'),
    level=logging.WARNING,
    format='%(asctime)s %(message)s')


def get_sftp_client():
    """Open an SFTP session to the SG (Societe Generale) report server."""
    # SECURITY: hard-coded credentials checked into source — move to
    # environment variables or a secret store.
    transport = Transport(('prmssp.amer.sgcib.com', 22))
    transport.connect(username='SerenitasGamma@USA', password='SSqrrLL99')
    return SFTPClient.from_transport(transport)


def get_baml_sftp_client():
    """Open an SFTP session to the BAML report server (key-based auth)."""
    transport = Transport(('ftps.b2b.ml.com', 22))
    pkey = RSAKey.from_private_key_file("/home/guillaume/.ssh/id_rsa")
    transport.connect(username='lmcginvs', pkey=pkey)
    return SFTPClient.from_transport(transport)


def download_baml_files(d=None):
    """Copy every file from BAML's 'outgoing' folder into DAILY_DIR/BAML_reports.

    `d` is accepted for interface symmetry with the other download helpers
    but is currently unused.
    """
    data_dir = DAILY_DIR / "BAML_reports"
    sftp = get_baml_sftp_client()
    try:
        for fname in sftp.listdir('outgoing'):
            sftp.get(f"outgoing/{fname}", localpath=data_dir / fname)
    finally:
        # BUGFIX: the session used to be left open (resource leak)
        sftp.close()


def download_sftp_files(d=None, report_types=None, retry_count=0):
    """Download the SG OTC reports, optionally for business date `d`.

    With d=None, fetches every statement/csv currently on the server.
    Otherwise polls for the dated files, sleeping 500s between attempts,
    giving up after 20 retries.
    """
    # BUGFIX: mutable default argument replaced with a None sentinel.
    # OTC_STATEMENT must stay last: the [:-1] slice below relies on it.
    if report_types is None:
        report_types = ["OTC_CASH_ACTIVITY", "OTC_POSITIONS", "OTC_MARGIN",
                        "OTC_MARGIN_EX_DEF", "OTC_STATEMENT"]
    if retry_count > 20:
        return
    data_dir = DAILY_DIR / "SG_reports"
    sftp = get_sftp_client()
    if d is None:
        for f in sftp.listdir('OTC'):
            if f.endswith("OTC_STATEMENT.xls"):
                print(f)
                sftp.get(f"OTC/{f}", localpath=data_dir / f)
            else:
                for report_type in report_types[:-1]:
                    if f.endswith(f"{report_type}.csv"):
                        print(f)
                        sftp.get(f"OTC/{f}", localpath=data_dir / f)
    else:
        file_list = sftp.listdir('OTC')
        for report_type in report_types:
            ext = "xls" if report_type == "OTC_STATEMENT" else "csv"
            f = f"{d:%Y%m%d}_{report_type}.{ext}"
            if f not in file_list:
                logging.info("File not here yet, trying again in 500s...")
                logging.info(f"Try count: {retry_count}")
                sleep(500)
                sftp.close()
                download_sftp_files(d, report_types, retry_count + 1)
                # BUGFIX: must stop here — the original fell through and
                # kept iterating over a closed SFTP session.
                return
            sftp.get(f"OTC/{f}", localpath=data_dir / f)
    sftp.close()


def download_ms_emails_from_gmail():
    """Save MS daily report attachments from the Gmail Globeop label."""
    data_dir = DAILY_DIR / "MS_reports"
    for msg in GmailMessage.List_msg_ids('Globeop/Operations'):
        try:
            message = GmailMessage.from_id(msg['id'])
            if 'SERCX **Daily' not in message['subject']:
                continue
            for attach in message.iter_attachments():
                fname = attach.get_filename()
                if 'NETSwaps' in fname:
                    fname = "Trade_Detail_" + fname.split("_")[1]
                elif 'NET_Collateral' in fname:
                    fname = "Collateral_Detail_" + fname.rsplit("_", 1)[1]
                else:
                    continue
                p = data_dir / fname
                if not p.exists():
                    # BUGFIX: was `part.get_payload(...)` — `part` is
                    # undefined; the loop variable is `attach`.
                    p.write_bytes(attach.get_payload(decode=True))
        except (KeyError, UnicodeDecodeError):
            logging.error("error decoding " + msg['id'])
            continue


def download_ms_emails(count=20):
    """Save MS daily report attachments from Exchange into MS_reports."""
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin calls MS"], count=count,
                         subject__contains="SERCX **Daily")
    data_dir = DAILY_DIR / "MS_reports"
    for msg in emails:
        for attach in msg.attachments:
            if 'NETSwaps' in attach.name:
                fname = "Trade_Detail_" + attach.name.split("_")[1]
            elif 'NET_Collateral' in attach.name:
                fname = "Collateral_Detail_" + attach.name.rsplit("_", 1)[1]
            else:
                continue
            p = data_dir / fname
            if not p.exists():
                p.write_bytes(attach.content)


def download_gs_emails(count=20):
    """Save GS margin attachments (.xls only) from Exchange into GS_reports."""
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin calls"], count=count,
                         subject__contains="Margin")
    data_dir = DAILY_DIR / "GS_reports"
    for msg in emails:
        for attach in msg.attachments:
            if attach.name.endswith('xls'):
                p = data_dir / attach.name
                if not p.exists():
                    p.write_bytes(attach.content)


def download_citi_emails(count=20):
    """Save Citi margin-call attachments from Exchange into CITI_reports."""
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin Calls Citi"], count=count,
                         subject__startswith="262966")
    data_dir = DAILY_DIR / "CITI_reports"
    for msg in emails:
        for attach in msg.attachments:
            p = data_dir / attach.name
            if not p.exists():
                p.write_bytes(attach.content)


def baml_collateral(d):
    """Per-strategy variation-margin amounts at BAML for business date `d`.

    Also records the day's initial-margin requirement into fcm_im.
    Returns a DataFrame with columns Strategy / Amount / Currency.
    """
    dawn_engine = dbengine("dawndb")
    df = pd.read_csv(
        DAILY_DIR / "BAML_reports" /
        f"OTC_Open_Positions_-_Credit_-_LMCG_{d:%Y%m%d}.CSV",
        usecols=['MTM', 'ACCRUEDCPN', 'VARMARGIN', 'REDCODE', 'NOTIONAL',
                 'EODSETTLEMENTPRICE', 'PERIOD', 'BUYSELL'],
        index_col=['REDCODE', 'PERIOD'])
    df = df[df.EODSETTLEMENTPRICE.notnull()]
    positions = pd.read_sql_query(
        "SELECT security_id, security_desc, folder, notional, currency "
        "FROM list_cds_positions_by_strat(%s)",
        dawn_engine, params=(d.date(),), index_col=['security_id'])
    df_helper = pd.read_sql_query(
        "SELECT redindexcode, upper(tenor::text) AS tenor, "
        "to_char(maturity, 'YYYYMM')::integer AS PERIOD FROM index_desc",
        dawn_engine, index_col=['redindexcode', 'tenor'])
    # 5th whitespace token of security_desc (with "R" appended) is the tenor
    # — assumes the dawndb description format; TODO confirm against data.
    positions['tenor'] = (positions.security_desc + "R").str.split(" ", expand=True)[4]
    positions = positions.set_index('tenor', append=True)
    positions['PERIOD'] = df_helper.loc[positions.index]
    positions = positions.reset_index(['tenor']).set_index(['PERIOD'], append=True)
    df['DIRTYUPFRONT'] = (df.MTM + df.ACCRUEDCPN) / df.NOTIONAL
    # sign convention: positive for Sell, negated otherwise
    df['DIRTYUPFRONT'] = df.DIRTYUPFRONT.where(df.BUYSELL == 'Sell', -df.DIRTYUPFRONT)
    df = df.groupby(level=[0, 1]).first()
    positions['dirtyupfront'] = df.loc[positions.index, 'DIRTYUPFRONT']
    positions['amount'] = positions['notional'] * positions['dirtyupfront']
    df = (positions
          .groupby('folder')
          .agg({'amount': 'sum', 'currency': 'first'})
          .reset_index('folder'))
    df.columns = ['Strategy', 'Amount', 'Currency']
    df.Strategy = df.Strategy.map({'HEDGE_MBS': 'MBSCDSCSH',
                                   'SER_ITRXCURVE': 'SER_ITRXCVCSH',
                                   'SER_IGCURVE': 'SER_IGCVECSH',
                                   'HYOPTDEL': 'HYCDSCSH',
                                   'IGOPTDEL': 'IGCDSCSH',
                                   'SER_IGINX': 'IGTCDSCSH'})
    df_margin = pd.read_csv(
        DAILY_DIR / "BAML_reports" / f"OTC_Moneyline_{d:%Y%m%d}.CSV",
        usecols=['Statement Date', 'AT CCY', 'Initial Margin Requirement'],
        parse_dates=['Statement Date'])
    df_margin.columns = ['date', 'currency', 'amount']
    df_margin['account'] = 'V0NSCLMFCM'
    try:
        dawn_engine.execute(
            "INSERT INTO fcm_im "
            "VALUES(%(date)s, %(account)s, %(currency)s, %(amount)s)",
            df_margin.iloc[-1].to_dict())
    except IntegrityError:
        pass  # IM row already recorded for this date
    return df


def sg_collateral(d):
    """Per-strategy collateral at SG, allocated pro-rata by notional.

    Also appends the day's initial-margin requirement to fcm_im.
    Returns a DataFrame with columns Strategy / Amount / Currency.
    """
    sg_dir = DAILY_DIR / "SG_reports"
    df_activity = pd.read_csv(
        sg_dir / f"{d:%Y%m%d}_OTC_CASH_ACTIVITY.csv",
        usecols=["Ticket Reference", "Record Type", "Currency", "Amount"])
    df_position = pd.read_csv(
        sg_dir / f"{d:%Y%m%d}_OTC_POSITIONS.csv",
        usecols=["Ticket Reference", "Reference Entity", "Mtm Value"])
    df_activity = (df_activity.loc[df_activity['Record Type'] == "VM"]
                   .set_index("Ticket Reference"))
    df_margin = pd.read_csv(sg_dir / f"{d:%Y%m%d}_OTC_MARGIN_EX_DEF.csv",
                            usecols=["Currency", "SG IMR"])
    df_position = df_position.set_index("Ticket Reference")
    # NOTE: expired trades occasionally need manual zero-MTM rows here
    # (they drop off OTC_POSITIONS before the cash activity clears).
    df = df_activity.join(df_position)
    df['Collateral'] = df['Mtm Value'] - df['Amount']
    # e.g. "CDX-NAIGS31V1-5Y" -> index_type/series/version + tenor
    ref_entity = df['Reference Entity'].str.split("-", expand=True)
    del ref_entity[0]
    ref_entity.columns = ['to_split', 'tenor']
    ref_entity = ref_entity.join(
        ref_entity['to_split'].str.extract(r"(IG|HY|EUROPE)S(\d+)V(\d+)$",
                                           expand=True))
    del ref_entity['to_split']
    ref_entity.columns = ['tenor', 'index_type', 'series', 'version']
    # BUGFIX: .loc instead of chained assignment (SettingWithCopy hazard)
    ref_entity.loc[ref_entity.index_type == "EUROPE", 'index_type'] = "EU"
    df = df.join(ref_entity)
    df = df.groupby(['index_type', 'series', 'tenor'])['Collateral'].sum()
    positions = pd.read_sql_query(
        "SELECT security_desc, folder, notional, currency "
        "FROM list_cds_positions_by_strat(%s)",
        dbengine("dawndb"), params=(d.date(),))
    instruments = positions.security_desc.str.split(expand=True)[[1, 3, 4]]
    instruments.columns = ['index_type', 'series', 'tenor']
    # BUGFIX: expand=False keeps a Series under modern pandas
    # (single capture group); raw string for the regex.
    instruments['series'] = instruments['series'].str.extract(r"S(\d+)",
                                                              expand=False)
    instruments.loc[instruments.index_type == "EUR", 'index_type'] = "EU"
    positions = positions.join(instruments)
    del positions['security_desc']
    positions = positions.set_index(['index_type', 'series', 'tenor'])
    df = positions.join(df)

    def allocate(g):
        # split each index's collateral across strategies by notional share
        g.Collateral = g.Collateral * g.notional / g.notional.sum()
        return g

    df = df.groupby(level=['index_type', 'series', 'tenor']).apply(allocate)
    df = df.groupby(['folder']).agg({'Collateral': 'sum', 'currency': 'first'})
    df = df.reset_index('folder')
    df = df.rename(columns={'folder': 'Strategy', 'currency': 'Currency',
                            'Collateral': 'Amount'})
    df.Strategy = df.Strategy.map({'HEDGE_MBS': 'MBSCDSCSH',
                                   'SER_ITRXCURVE': 'SER_ITRXCVCSH',
                                   'SER_IGCURVE': 'SER_IGCVECSH',
                                   'HYOPTDEL': 'HYCDSCSH',
                                   'IGOPTDEL': 'IGCDSCSH'})
    df_margin['account'] = 'SGNSCLMASW'
    df_margin = df_margin.rename(columns={'SG IMR': 'amount',
                                          'Currency': 'currency'})
    df_margin['date'] = d
    try:
        df_margin.to_sql("fcm_im", dbengine("dawndb"),
                         if_exists='append', index=False)
    except IntegrityError:
        pass  # IM rows already recorded for this date
    return df


def ms_collateral(d):
    """Per-strategy collateral at Morgan Stanley for business date `d`.

    Returns a DataFrame with columns Strategy / Amount / Currency, with an
    M_CSH_CASH residual row so the amounts net to the posted collateral.
    """
    df = pd.read_excel(DAILY_DIR / "MS_reports" /
                       f"Collateral_Detail_{d:%Y%m%d}.xls")
    collat = df.loc[1, 'coll_val_ccy'].replace(",", "")
    # parenthesised figures are negative (accounting style)
    if "(" in collat:
        collat = -float(collat[1:-1])
    else:
        collat = float(collat)
    df = pd.read_excel(DAILY_DIR / "MS_reports" /
                       f"Trade_Detail_{d:%Y%m%d}.xls")
    # trade book -> internal strategy code (was a dict named `d`, which
    # shadowed the date parameter)
    book_map = {'TRCDX': 'IGTCDSCSH', 'ABINT': 'MBSCDSCSH',
                'ACSWN': 'IRDEVCSH', 'ABOPB': 'HYCDSCSH',
                'ACUSD': 'IRDEVCSH'}
    df.trade_book = df.trade_book.replace(book_map)
    df = df.groupby('trade_book')[["collat_req_in_agr_ccy"]].sum()
    df['Currency'] = "USD"
    df = df.reset_index()
    col_names = ['Strategy', 'Amount', 'Currency']
    df.columns = col_names
    residual = pd.DataFrame.from_records(
        [('M_CSH_CASH', -collat - df.Amount.sum(), "USD")], columns=col_names)
    return pd.concat([df, residual])


def load_gs_file(d, pattern):
    """Locate and load the GS report matching `pattern` for date `d`.

    Raises FileNotFoundError when the file has not arrived yet (callers
    catch this to fall back to the previous business day).
    """
    try:
        fname = next((DAILY_DIR / "GS_reports")
                     .glob(f"{pattern}*{d.strftime('%d_%b_%Y')}*"))
    except StopIteration:
        raise FileNotFoundError(f"GS {pattern} file not found for date {d}")
    # fixed report layout: 9 header rows, 77 footer rows
    return pd.read_excel(fname, skiprows=9, skipfooter=77)


def gs_collateral(d):
    """Per-strategy collateral at Goldman Sachs for business date `d`.

    Returns a DataFrame with columns Strategy / Amount / Currency, with an
    M_CSH_CASH residual row so the amounts net to the posted collateral.
    """
    df = load_gs_file(d, "Collateral_Detail")
    # .squeeze(): float(Series) is deprecated; still errors if >1 row
    collateral = float(df.Quantity.squeeze())
    df = load_gs_file(d, "Trade_Detail")
    df = df[['Transaction Type', 'NPV (USD)', 'Initial Margin Required']]
    df = df.groupby('Transaction Type').sum()
    df = df.sum(axis=1).to_frame(name='Amount')
    df['Currency'] = 'USD'
    df = df.reset_index()
    df.columns = ['Strategy', 'Amount', 'Currency']
    # TODO: need to break down SWO_CDIXOP between IG and HY
    df.Strategy = df.Strategy.replace({'SWAP_CDINDT': 'IGTCDSCSH',
                                       'GENERIC_IRSIRD': 'IRDEVCSH',
                                       'SWO_CDIXOP': 'IGCDSCSH',
                                       'FX': 'M_CSH_CASH'})
    df.Amount *= -1
    # BUGFIX: DataFrame.append was removed in pandas 2.x — use pd.concat
    residual = pd.DataFrame([{'Strategy': "M_CSH_CASH",
                              'Amount': -collateral - df.Amount.sum(),
                              'Currency': "USD"}])
    df = pd.concat([df, residual], ignore_index=True)
    df = (df.groupby('Strategy')
            .agg({'Amount': 'sum', 'Currency': 'first'})
            .reset_index())
    return df


def send_email(df_ms, df_baml, df_gs):
    """Email the per-broker collateral tables to SS&C ops, cc NY ops."""
    pd.set_option('display.float_format', '{:.2f}'.format)
    # NOTE(review): the HTML markup in this literal was mangled in the
    # recovered source; reconstructed minimally — confirm rendering.
    content = HTMLBody(
        '<html><body>'
        '<p>At Morgan Stanley:</p>{}'
        '<p>At Bank of America Merrill Lynch:</p>{}'
        '<p>At Goldman Sachs:</p>{}'
        '</body></html>'.format(df_ms.to_html(index=False),
                                df_baml.to_html(index=False),
                                df_gs.to_html(index=False)))
    em = ExchangeMessage()
    em.send_email("IAM booking", content,
                  ['serenitas.otc@sscinc.com'], ['nyops@lmcg.com'])


if __name__ == "__main__":
    download_ms_emails()
    download_gs_emails()
    download_citi_emails()
    d = (pd.Timestamp.today() - BDay()).normalize()
    # download_sftp_files(d)
    download_baml_files()
    try:
        df_ms = ms_collateral(d)
    except FileNotFoundError as e:
        logging.info(e)
        df_ms = ms_collateral(d - BDay())
    # df_sg = sg_collateral(d)
    df_baml = baml_collateral(d)
    try:
        df_gs = gs_collateral(d)
    except FileNotFoundError as e:
        logging.info(e)
        df_gs = gs_collateral(d - BDay())
    send_email(df_ms, df_baml, df_gs)