"""Download and process Bank of America (BAML) ISDA collateral reports.

Pulls report files either from BofA's secure-mail web reader or directly
from e-mail attachments, then builds a per-strategy collateral DataFrame.
"""

from io import BytesIO
import logging
import pathlib
import zipfile
from urllib.parse import parse_qs, urljoin, urlsplit, urlunsplit

import pandas as pd
import requests
from bs4 import BeautifulSoup

from . import DAILY_DIR

logger = logging.getLogger(__name__)


def download_from_secure_id(
    secure_id: str,
    brand: str,
    path: pathlib.Path,
    base_url="https://secmail.bankofamerica.com",
):
    """Fetch a secure-mail message and extract its zip payload into *path*.

    Walks the BofA "securereader" JSF flow: GET the login dialog, POST the
    credentials, then POST the "download zip" button on the reader page.
    Every non-HTML member of the returned zip is extracted into *path*.

    Args:
        secure_id: the ``id`` query parameter from the secure-mail link.
        brand: the ``brand`` query parameter from the secure-mail link.
        path: directory the zip members are extracted into.
        base_url: scheme+host of the secure-mail site.

    Raises:
        ValueError: if the message id has expired / cannot be found.
    """
    # SECURITY(review): hard-coded credentials checked into source control.
    # These should live in a secrets store or environment variables.
    password = {
        "ghorel@lmcg.com": "v4vdMvH9Qe9t",
        "nyops@lmcg.com": "a6lAkBfqDSHsrkGspYSS",
    }
    payload = {}
    with requests.Session() as session:
        r = session.get(
            urljoin(base_url, "formpostdir/securereader"),
            params={"id": secure_id, "brand": brand},
        )
        soup = BeautifulSoup(r.content, features="lxml")
        form = soup.find(id="dialog")
        # The site redirects the form action to a "messagenotfound" page
        # when the id is stale or wrong.
        if "messagenotfound" in form["action"]:
            raise ValueError("message not found")
        # Echo back every hidden input, then fill in the password for
        # whichever account the form pre-populated as the username.
        for inp in form.find_all("input"):
            payload[inp["name"]] = inp["value"]
        payload["dialog:password"] = password[payload["dialog:username"]]
        r = session.post(base_url + form["action"], data=payload)

        soup = BeautifulSoup(r.content, features="lxml")
        form = soup.find(id="readTB")
        payload = {
            "readTB": "readTB",
            "readTB:downloadZipButton": "readTB:downloadZipButton",
        }
        # JSF requires the ViewState token to be posted back with the form.
        for inp in form.find_all("input"):
            if "ViewState" in inp["name"]:
                payload["javax.faces.ViewState"] = inp["value"]
        r = session.post(urljoin(base_url, "securereader/read.jsf"), data=payload)
        if r.headers["content-type"] == "application/octet-stream":
            with zipfile.ZipFile(BytesIO(r.content)) as z:
                for f in z.namelist():
                    # Skip the HTML cover page shipped inside the zip.
                    if not f.endswith("html"):
                        z.extract(f, path=path)


def download_files(d=None, count=20):
    """Scan the "Margin Calls Baml" mailbox and download BAML report files.

    Two kinds of sender are handled:
    * valuation e-mails carry a secure-mail link whose ``id``/``brand``
      query parameters are fed to :func:`download_from_secure_id`;
    * collateral-operations e-mails attach ``.xls`` reports directly,
      which are saved if not already present.

    Args:
        d: unused; kept for signature compatibility with callers.
        count: number of most-recent messages to scan.
    """
    from exchange import ExchangeMessage

    DATA_DIR = DAILY_DIR / "BAML_ISDA_reports"
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin Calls Baml"], count=count)
    for msg in emails:
        if msg.sender.name == "us_otc_client_valuation@baml.com":
            # Pull the first link out of the message body and split its
            # query string into the secure-reader parameters.
            soup = BeautifulSoup(msg.body, features="lxml")
            a = soup.find("a")
            url = urlsplit(a["href"])
            query = parse_qs(url.query)
            # Keep only scheme + netloc as the base URL.
            base_url = urlunsplit(url[:2] + ("",) * 3)
            try:
                download_from_secure_id(
                    query["id"][0], query["brand"][0], DATA_DIR, base_url
                )
            except ValueError as e:
                # Expired/missing secure messages are expected; log and move on.
                logger.error(e)
                continue
        if msg.sender.name == "bank_of_america_collateral_operations@bankofamerica.com":
            for attach in msg.attachments:
                if attach.name.endswith("xls"):
                    p = DATA_DIR / attach.name
                    # Don't re-download files we already have.
                    if not p.exists():
                        p.write_bytes(attach.content)


def collateral(d, dawn_trades, *args):
    """Build the per-strategy collateral movement frame for date *d*.

    Reads the day's ``Collat_*`` report for the posted collateral notional
    and the ``Interest Rates Trade Summary`` report for trade-level MTM,
    joins the trades to *dawn_trades* on counterparty id, and nets IA
    against MTM per folder.  A balancing ``M_CSH_CASH`` row is appended so
    the amounts sum to the negated collateral figure.

    Args:
        d: report date (formatted into the report filenames).
        dawn_trades: DataFrame with at least ``cpty_id``, ``folder``, ``ia``.
        *args: unused; kept for signature compatibility with callers.

    Returns:
        DataFrame indexed by ``Strategy`` with ``Amount``, ``Currency``,
        ``date`` columns.

    Raises:
        ValueError: if no collateral report exists for *d*, or a trade id
            is missing from *dawn_trades*.
    """
    REPORTS_DIR = DAILY_DIR / "BAML_ISDA_reports"
    try:
        fname = next(REPORTS_DIR.glob(f"Collat_*{d:%m%d%Y}_*.xls"))
    except StopIteration:
        # FIX: message previously lacked the f-prefix and printed "{d}" literally.
        raise ValueError(f"no data for date {d}")
    df = pd.read_excel(fname, skiprows=6, skipfooter=6)
    df = df.drop(0, axis=0)
    # FIX: float(Series) was removed in pandas 2.0; take the scalar explicitly.
    # Assumes exactly one data row remains after dropping the header row —
    # TODO(review): confirm against a real Collat_* report.
    collateral = float(df.Notional.iloc[0])

    fname = REPORTS_DIR / f"Interest Rates Trade Summary_{d:%d-%b-%Y}.xls"
    # TODO: make more robust — nrows=1 only reads the first trade row.
    df = pd.read_excel(fname, skiprows=6, nrows=1)
    df = df[["Trade ID", "MTM(USD)"]]
    df["Trade ID"] = df["Trade ID"].astype("str")
    df = df.merge(dawn_trades, how="left", left_on="Trade ID", right_on="cpty_id")
    missing_ids = df.loc[df.cpty_id.isnull(), "Trade ID"]
    if not missing_ids.empty:
        raise ValueError(f"{missing_ids.tolist()} not in the database")
    df = df[["folder", "MTM(USD)", "ia"]]
    df = df.groupby("folder").sum()
    # Net independent amount against mark-to-market per folder.
    df = (df["ia"] - df["MTM(USD)"]).to_frame(name="Amount")
    df["Currency"] = "USD"
    df = df.reset_index()
    df.columns = ["Strategy", "Amount", "Currency"]
    df.Amount *= -1
    # FIX: DataFrame.append was removed in pandas 2.0 — use pd.concat.
    cash_row = pd.DataFrame(
        [
            {
                "Strategy": "M_CSH_CASH",
                "Amount": -collateral - df.Amount.sum(),
                "Currency": "USD",
            }
        ]
    )
    df = pd.concat([df, cash_row], ignore_index=True)
    df["date"] = d
    return df.set_index("Strategy")