import pandas as pd
from . import DAILY_DIR

# Mailbox folder path holding each fund's GS margin-call emails.
paths = {
    "Serenitas": ["NYops", "Margin calls"],
    "Brinker": ["NYops", "Margin Calls GS-Brinker"],
    "BowdSt": ["BowdoinOps", "Margin GS"],
}

def download_files(em, count=20, *, fund="Serenitas", **kwargs):
    """Save the .xls attachments of recent GS margin emails to the fund's report directory."""
    emails = em.get_msgs(path=paths[fund], count=count, subject__contains="Margin")
    data_dir = DAILY_DIR / fund / "GS_reports"
    for msg in emails:
        for attach in msg.attachments:
            fname = attach.name
            if fname.endswith("xls"):
                # Skip attachments that were already downloaded on a previous run.
                p = data_dir / fname
                if not p.exists():
                    p.write_bytes(attach.content)

def load_file(d, fund, pattern):
    """Locate and parse the GS report whose name starts with `pattern` for date `d`."""
    try:
        fname = next((DAILY_DIR / fund / "GS_reports").glob(f"{pattern}*{d:%d_%b_%Y}*"))
    except StopIteration:
        raise FileNotFoundError(f"GS {pattern} file not found for date {d}") from None
    # skiprows/skipfooter drop the fixed header and footer blocks of the GS report layout.
    return pd.read_excel(fname, skiprows=9, skipfooter=77)

def collateral(d, dawn_trades, *, fund="Serenitas", **kwargs):
    """Build per-strategy margin postings for date `d` from the GS collateral and trade reports."""
    # Total collateral quantity held at GS (one row or several rows to sum).
    coll = load_file(d, fund, "Collateral_Detail")
    coll = coll.dropna(subset=["Quantity"])
    collateral_qty = float(coll["Quantity"].sum())

    # Per-trade NPV and initial margin, mapped back to internal strategies ("folders").
    df = load_file(d, fund, "Trade_Detail")
    df = df.dropna(subset=["GS Entity"])
    df = df[df["Notional (USD)"] != 0.0]
    df = df[["Trade Id", "Transaction Type", "NPV (USD)", "Initial Margin Required"]]
    df = df.merge(dawn_trades, how="left", left_on="Trade Id", right_on="cpty_id")
    missing_ids = df.loc[df["cpty_id"].isna(), "Trade Id"]
    if not missing_ids.empty:
        raise ValueError(f"{missing_ids.tolist()} not in the database")

    # Net NPV plus initial margin per strategy, sign-flipped to our convention.
    df = df[["folder", "NPV (USD)", "Initial Margin Required"]]
    df = df.groupby("folder").sum()
    df = df.sum(axis=1).to_frame(name="Amount")
    df["Currency"] = "USD"
    df = df.reset_index()
    df.columns = ["Strategy", "Amount", "Currency"]
    df["Amount"] *= -1

    # Cash line that balances the collateral quantity against the strategy totals
    # (DataFrame.append was removed in pandas 2.0, so use pd.concat instead).
    cash_row = pd.DataFrame(
        [
            {
                "Strategy": "M_CSH_CASH",
                "Amount": -collateral_qty - df["Amount"].sum(),
                "Currency": "USD",
            }
        ]
    )
    df = pd.concat([df, cash_row], ignore_index=True)
    df["date"] = d
    return df.set_index("Strategy")
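

# Usage sketch, not part of the original module: shows how the helpers above might be
# wired together. The mailbox client and the `dawn_trades` frame are assumptions
# inferred from how they are used: get_msgs() must return messages whose attachments
# expose .name and .content, and dawn_trades must map GS "cpty_id" trade ids to
# internal "folder" strategies. Run with `python -m <package>.<module>`.
if __name__ == "__main__":
    import datetime as dt

    class _StubMailbox:
        """Hypothetical stand-in for the real mail client used by download_files."""

        def get_msgs(self, path, count, **filters):
            return []  # a real client would return messages with .attachments

    dawn_trades = pd.DataFrame({"cpty_id": ["GS123"], "folder": ["M_DEMO"]})

    download_files(_StubMailbox(), count=20, fund="Serenitas")
    try:
        print(collateral(dt.date.today(), dawn_trades, fund="Serenitas"))
    except FileNotFoundError as err:
        # Expected when today's GS reports have not yet been downloaded.
        print(err)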