# python/external_deriv_marks.py
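"""Collect derivative NAV marks from counterparty reports (GS, MS, Citi, BAML).

When run as a script, the marks are inserted into the external_marks_deriv
table in dawndb.
"""
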
import datetime
import re
from typing import Optional

import pandas as pd

from env import DAILY_DIR


def gs_navs(date: Optional[datetime.date] = None):
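    """Load NAVs from Goldman Sachs Trade_Detail*.xls reports in DAILY_DIR/GS_reports.

    If date is given, only that day's report is read; otherwise every available
    report is read.  Returns a DataFrame indexed by (report date, Trade Id) with
    columns trade_date, buy/sell, notional and nav, where the reported NPV is
    sign-flipped (it is quoted from Goldman's point of view).
    """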
    d = {}
    date_str = date.strftime("%d_%b_%Y") if date else ""
    for fname in (DAILY_DIR / "GS_reports").glob(f"Trade_Detail*{date_str}*.xls"):
        try:
            df = pd.read_excel(fname, skiprows=9, skipfooter=77, index_col="Trade Id")
        except ValueError:
            # file does not have the expected report layout
            continue
        df = df.dropna(subset=["GS Entity"])
        df["Trade Date"] = pd.to_datetime(df["Trade Date"])
        df = df[["Trade Date", "Buy/Sell", "Notional (USD)", "NPV (USD)"]]
        df.columns = ["trade_date", "buy/sell", "notional", "nav"]
        # the file name contains what looks like an account number (9972734);
        # strip it so the regex below picks up the report date instead
        name = fname.name.replace("9972734", "")
        m = re.match(r"[^\d]*(\d{2}_.{3}_\d{4})", name)
        if m:
            date = datetime.datetime.strptime(m.group(1), "%d_%b_%Y")
        d[date] = df
    df = pd.concat(d)
    # NPV (USD) is quoted from Goldman's point of view, so flip the sign
    df.nav *= -1.0
    return df


def ms_navs(date: Optional[datetime.date] = None):
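    """Load NAVs from Morgan Stanley Trade_Detail_YYYYMMDD.xls reports in DAILY_DIR/MS_reports.

    If date is given, only that day's report is read; otherwise every available
    report is read.  Returns a DataFrame indexed by (report date, trade_id) with
    columns trade_date, buy/sell, notional and nav.
    """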
    d = {}
    date_str = date.strftime("%Y%m%d") if date else "*"
    for fname in (DAILY_DIR / "MS_reports").glob(f"Trade_Detail_{date_str}.xls"):
        df = pd.read_excel(fname, index_col="trade_id")
        df.trade_date = pd.to_datetime(df.trade_date)
        df = df[
            ["trade_date", "pay_rec", "notional_in_trade_ccy", "exposure_in_rpt_ccy"]
        ]
        df.columns = ["trade_date", "buy/sell", "notional", "nav"]
        # the report date comes from the YYYYMMDD stamp in the file name
        m = re.match(r"[^\d]*(\d{8})", fname.name)
        if m:
            date = datetime.datetime.strptime(m.group(1), "%Y%m%d")
        d[date] = df
    return pd.concat(d)


def citi_navs(date: Optional[datetime.date] = None):
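    """Load NAVs from Citi 262966_Portfolio_*.xlsx reports in DAILY_DIR/CITI_reports.

    Citi can publish several files per day; only the latest snapshot per
    (Value Date, Operations File) is kept.  Returns a DataFrame with columns
    trade_date, buy/sell, notional and nav, where the reported Market Value is
    sign-flipped (it is quoted from Citi's point of view).
    """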
    d = {}
    glob_str = date.strftime("%Y%m%d*") if date else "*"
    for fname in (DAILY_DIR / "CITI_reports").glob(f"262966_Portfolio_{glob_str}.xlsx"):
        # the file name ends in a timestamp; the last three digits are dropped,
        # presumably to reduce the fractional seconds to the microsecond
        # precision that %f expects
        date_parsed = datetime.datetime.strptime(
            fname.stem.rsplit("_", 1)[1][:-3], "%Y%m%d%H%M%S%f"
        )
        df = pd.read_excel(
            fname, skiprows=6, skipfooter=2, parse_dates=["Trade Date", "Value Date"]
        )
        df = df.dropna(subset=["Operations File"]).set_index(
            ["Value Date", "Operations File"]
        )
        df = df[["Trade Date", "Party Position", "Notional", "Market Value"]]
        df.columns = ["trade_date", "buy/sell", "notional", "nav"]
        d[date_parsed] = df
    # there can be multiple files per day, we take the latest one
    df = (
        pd.concat(d)
        .sort_index()
        .groupby(level=["Value Date", "Operations File"])
        .last()
    )
    # Market Value is quoted from Citi's point of view, so flip the sign
    df.nav *= -1.0
    return df


def baml_navs(date: Optional[datetime.date] = None):
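    """Load NAVs from BAML "Interest Rates Trade Summary_*.xls" reports in DAILY_DIR/BAML_ISDA_reports.

    If date is given, only that day's report is read; otherwise every available
    report is read.  Returns a DataFrame indexed by (report date, Trade ID) with
    columns trade_date, buy/sell, notional and nav.
    """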
    d = {}
    glob_str = date.strftime("%d-%b-%Y") if date else "*"
    for fname in (DAILY_DIR / "BAML_ISDA_reports").glob(
        f"Interest Rates Trade Summary_{glob_str}.xls"
    ):
        # the report date comes from the file name
        date = datetime.datetime.strptime(fname.stem.split("_")[1], "%d-%b-%Y")
        df = pd.read_excel(fname, skiprows=6, nrows=1)
        df = df.set_index("Trade ID")
        df = df[["Trade Date", "Flow Direction", "Notional", "MTM(USD)"]]
        df.columns = ["trade_date", "buy/sell", "notional", "nav"]
        d[date] = df
    return pd.concat(d)


if __name__ == "__main__":
    import argparse
    import logging
    from utils.db import dbconn
    from pandas.tseries.offsets import BDay

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "date",
        type=datetime.datetime.fromisoformat,
        nargs="?",
        default=datetime.date.today(),
        help="report date in ISO format (defaults to today)",
    )
    parser.add_argument(
        "-a", "--all", action="store_true", default=False, help="download everything"
    )
    parser.add_argument(
        "-d", "--debug", action="store_true", default=False, help="more verbose logging"
    )
    args = parser.parse_args()
    date = None if args.all else args.date
    logging.basicConfig()
    logger = logging.getLogger("external_marks")
    logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
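    # fetch each counterparty's NAVs and upsert them into external_marks_deriv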
    for cp in ["MS", "CITI", "GS", "BAML"]:
        logger.info(cp)
        # everyone except Citi is looked up by the previous business day;
        # with --all, date is None and is passed through unshifted
        if date is not None and cp != "CITI":
            date_arg = (date - BDay()).date()
        else:
            date_arg = date
        try:
            df = globals()[f"{cp.lower()}_navs"](date_arg)
        except ValueError:
            # pd.concat raises ValueError when no report files were found
            continue
        logger.debug(df)
        with dbconn("dawndb") as conn:
            with conn.cursor() as c:
                # the two index levels, the nav and the counterparty fill the
                # four columns of external_marks_deriv
                for idx, nav in df["nav"].items():
                    c.execute(
                        "INSERT INTO external_marks_deriv "
                        "VALUES(%s, %s, %s, %s) ON CONFLICT DO NOTHING",
                        (*idx, float(nav), cp),
                    )