1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
|
from . import DAILY_DIR
from bs4 import BeautifulSoup
from io import BytesIO
import pandas as pd
import pathlib
from urllib.parse import urlsplit, parse_qs, urlunsplit, urljoin
import requests
import zipfile
def download_from_secure_id(
    secure_id: str,
    brand: str,
    path: pathlib.Path,
    base_url="https://secmail.bankofamerica.com",
    username="ghorel@lmcg.com",
    password="v4vdMvH9Qe9t",  # SECURITY: hardcoded credential — move to a secret store
):
    """Download and extract the zip attachment of a BAML secure e-mail.

    Logs into the secure-reader portal, submits the two JSF forms and, when
    the final response is a zip (content-type ``application/octet-stream``),
    extracts every non-HTML member into *path*.  Any other response (e.g. an
    HTML error page) is silently skipped — best-effort by design.

    Parameters
    ----------
    secure_id : portal message id (the ``id`` query parameter of the link).
    brand : portal brand code (the ``brand`` query parameter).
    path : directory the zip members are extracted into.
    base_url : scheme+host of the secure-mail portal.
    username, password : portal credentials.  NOTE(review): these default to
        values hardcoded in source; they should come from a secret store.
    """
    # First form: the username/password dialog.  Every hidden input (JSF
    # view state etc.) must be echoed back unchanged.
    payload = {"dialog:username": username, "dialog:password": password}
    with requests.Session() as session:
        r = session.get(
            urljoin(base_url, "formpostdir/securereader"),
            params={"id": secure_id, "brand": brand},
        )
        soup = BeautifulSoup(r.content, features="lxml")
        form = soup.find(id="dialog")
        for inp in form.find_all("input"):
            if inp["name"] not in ["dialog:username", "dialog:password"]:
                payload[inp["name"]] = inp["value"]
        # urljoin handles both absolute and relative form actions (string
        # concatenation broke when base_url carried a trailing slash).
        r = session.post(urljoin(base_url, form["action"]), data=payload)
        soup = BeautifulSoup(r.content, features="lxml")
        form = soup.find(id="readTB")
        # Second form: press the "download zip" toolbar button; JSF requires
        # the ViewState token to be posted back.
        payload = {
            "readTB": "readTB",
            "readTB:downloadZipButton": "readTB:downloadZipButton",
        }
        for inp in form.find_all("input"):
            if "ViewState" in inp["name"]:
                payload["javax.faces.ViewState"] = inp["value"]
        r = session.post(urljoin(base_url, "securereader/read.jsf"), data=payload)
        if r.headers["content-type"] == "application/octet-stream":
            with zipfile.ZipFile(BytesIO(r.content)) as z:
                for member in z.namelist():
                    if not member.endswith("html"):
                        z.extract(member, path=path)
def download_files(d=None, count=20):
    """Scan recent Exchange messages and download the BAML ISDA report zips.

    Looks at the last *count* messages in the "NYops / Margin Calls Baml"
    folder and, for each message sent by the BAML valuation desk, follows
    the secure-mail link and extracts the report files into
    ``DAILY_DIR / "BAML_ISDA_reports"``.

    Parameters
    ----------
    d : unused; kept for interface compatibility with sibling downloaders.
    count : number of most recent messages to scan.
    """
    # Bug fix: this import previously lived inside download_from_secure_id
    # (where it was unused and, being function-local, never made the name
    # visible here), so ExchangeMessage() raised NameError at runtime.
    from exchange import ExchangeMessage

    data_dir = DAILY_DIR / "BAML_ISDA_reports"
    data_dir.mkdir(parents=True, exist_ok=True)  # ensure extraction target exists
    em = ExchangeMessage()
    emails = em.get_msgs(path=["NYops", "Margin Calls Baml"], count=count)
    for msg in emails:
        if msg.sender.name != "us_otc_client_valuation@baml.com":
            continue
        # The message body carries a single secure-mail link; recover the
        # portal host and the id/brand query parameters from it.
        soup = BeautifulSoup(msg.body, features="lxml")
        link = soup.find("a")
        url = urlsplit(link["href"])
        query = parse_qs(url.query)
        base_url = urlunsplit(url[:2] + ("",) * 3)  # keep scheme+host only
        download_from_secure_id(
            query["id"][0], query["brand"][0], data_dir, base_url
        )
def collateral(d, dawn_trades, *args, ia_collateral=430_000):
    """Build the daily BAML ISDA collateral postings per strategy.

    Reads the "Interest Rates Trade Summary" report for date *d*, joins the
    report's trade ids against *dawn_trades*, nets ``ia - MTM`` per folder
    (sign-flipped), and appends a balancing cash row so the Amount column
    sums to ``-ia_collateral``.

    Parameters
    ----------
    d : report date; used in the report filename and stamped on the output.
    dawn_trades : DataFrame with at least ``cpty_id``, ``folder`` and ``ia``
        columns — assumed from usage here; confirm against the caller.
    *args : ignored; kept for interface compatibility with sibling functions.
    ia_collateral : independent-amount cash collateral in USD
        (previously hardcoded as 430_000).

    Returns
    -------
    DataFrame indexed by Strategy with Amount, Currency and date columns.

    Raises
    ------
    ValueError
        If any trade id in the report is missing from *dawn_trades*.
    """
    fname = (
        DAILY_DIR
        / "BAML_ISDA_reports"
        / f"Interest Rates Trade Summary_{d:%d-%b-%Y}.xls"
    )
    # skiprows/skipfooter trim the report's fixed header banner and footer.
    df = pd.read_excel(fname, skiprows=6, skipfooter=102)
    df = df[["Trade ID", "MTM(USD)"]]
    df["Trade ID"] = df["Trade ID"].astype("str")
    df = df.merge(dawn_trades, how="left", left_on="Trade ID", right_on="cpty_id")
    # A left-join miss means the report has a trade we don't know about.
    missing_ids = df.loc[df.cpty_id.isnull(), "Trade ID"]
    if not missing_ids.empty:
        raise ValueError(f"{missing_ids.tolist()} not in the database")
    df = df[["folder", "MTM(USD)", "ia"]]
    df = df.groupby("folder").sum()
    df = (df["ia"] - df["MTM(USD)"]).to_frame(name="Amount")
    df["Currency"] = "USD"
    df = df.reset_index()
    df.columns = ["Strategy", "Amount", "Currency"]
    df.Amount *= -1
    # Balancing cash leg.  pd.concat replaces DataFrame.append, which was
    # deprecated in pandas 1.4 and removed in 2.0.  (The local variable was
    # also renamed: it used to shadow this function's own name.)
    cash_row = pd.DataFrame(
        [
            {
                "Strategy": "M_CSH_CASH",
                "Amount": -ia_collateral - df.Amount.sum(),
                "Currency": "USD",
            }
        ]
    )
    df = pd.concat([df, cash_row], ignore_index=True)
    df["date"] = d
    return df.set_index("Strategy")
|