path: root/python/strat_cash_realloc.py
import csv
import datetime
from io import StringIO

from serenitas.analytics.dates import prev_business_day
from serenitas.utils.db import dbconn
from serenitas.utils.env import DAILY_DIR
from serenitas.utils.sftp import SftpClient  # assumed module path; SftpClient is used below but was never imported
from headers import get_headers
from process_queue import get_filepath


def rename_keys(d, mapping):
    """rename keys in dictionary according to mapping dict inplace"""
    for k, v in mapping.items():
        if k in d:
            d[v] = d.pop(k)


def process_upload(trades):
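    """Write the trade rows to a wire CSV under DAILY_DIR and upload it to the SFTP 'incoming' folder."""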
    buf = StringIO()
    csvwriter = csv.writer(buf)
    headers = get_headers("wire", "BOWDST")
    csvwriter.writerow(headers)
    csvwriter.writerows(trades)
    data = buf.getvalue().encode()
    dest = get_filepath(DAILY_DIR, "wire", "BOWDST")
    dest.parent.mkdir(exist_ok=True)
    dest.write_bytes(data)
    sftp = SftpClient.from_creds("hm_globeop")
    sftp.client.chdir("incoming")
    sftp.put(data, dest.name)


def generate_csv(date, conn):
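    """Generate the BOWDST cash reallocation wire file for ``date``.

    Non-zero USD/EUR cash balances per (port, strat, cash account) are read from
    valuation_reports, recorded in strat_cash_realloc, and booked as two-leg
    CashFlowDeal transfers: one leg against the strategy folder and an offsetting
    leg against that strategy's cash bucket.
    """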
    headers = get_headers("wire", "BOWDST")
    with conn.cursor() as c, conn.cursor() as d:
        c.execute(
            "SELECT vr.*, accounts2.custodian, accounts2.cp_code, pfm.folder AS dirty_strat "
            "FROM (SELECT sum(endqty) AS endqty, port, strat, custacctname, invid "
            "FROM valuation_reports v "
            "WHERE periodenddate = %s AND fund = 'BOWDST' AND invid IN ('USD', 'EUR') "
            "AND port NOT IN ('GFS_HELPER_BUSINESS_UNIT', 'CASH') "
            "GROUP BY (port, strat, custacctname, invid) HAVING sum(endqty) != 0) vr "
            "LEFT JOIN accounts2 ON custacctname = cash_account "
            "LEFT JOIN portfolio_folder_mapping pfm ON vr.strat::text = pfm.clean_folder "
            "WHERE pfm.folder IS NOT NULL",
            (date,),
        )
        trades = []
        for row in c:
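            # Only strategies that are not themselves cash buckets get a reallocation deal.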
            if row.strat not in CASH_STRATEGY_MAPPING:
                d.execute(
                    "INSERT INTO strat_cash_realloc (portfolio, folder, trade_date, amount, currency, fund, cash_account) VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING dealid",
                    (
                        row.port,
                        row.strat,
                        date,
                        row.endqty,
                        row.invid,
                        "BOWDST",
                        row.custacctname,
                    ),
                )
                (dealid,) = d.fetchone()
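                # Map DB column names onto wire-file headers and fill in the static deal fields.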
                obj = row._asdict()
                rename_keys(
                    obj,
                    {
                        "invid": "Currency",
                        "custacctname": "Cash Account",
                        "custodian": "Custodian",
                        "cp_code": "Counterparty",
                        "dirty_strat": "Folder",
                    },
                )
                obj["Deal Type"] = "CashFlowDeal"
                obj["Deal Id"] = dealid
                obj["Action"] = "NEW"
                obj["Client"] = "HEDGEMARK"
                obj["Fund"] = "BOS_PAT_BOWDOIN"
                obj["State"] = "Valid"
                obj["Trade Date"] = date
                obj["Settlement Date"] = date
                obj["Transaction Type"] = "Transfer"
                obj["Instrument Type"] = "Cashflow"
                obj["Amount"] = -obj["endqty"]
                offset = obj.copy()
                # create second leg
                offset["Deal Id"] = obj["Deal Id"] + "_O"
                offset["Amount"] = -obj["Amount"]
                offset["Folder"] = STRATEGY_CASH_MAPPING[obj["Folder"]]
                trades.append([obj.get(h, None) for h in headers])
                trades.append([offset.get(h, None) for h in headers])
    conn.commit()
    if trades:
        process_upload(trades)


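# Cash strategies and the trading strategy folders associated with each.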
CASH_STRATEGY_MAPPING = {
    "COCSH": ["IGREC", "IGPAYER", "HYPAYER", "HYREC", "HYOPTDEL", "IGOPTDEL"],
    "IRDEVCSH": ["DV01", "STEEP", "FLAT"],
    "TCSH": [
        "IGMEZ",
        "IGSNR",
        "IGEQY",
        "HYMEZ",
        "HYEQY",
        "HYSNR",
        "BSPK",
        "XOMEZ",
        "XOEQY",
        "IGINX",
        "HYINX",
        "XOINX",
        "EUMEZ",
        "EUINX",
    ],
    "MBSCDSCSH": [
        "HEDGE_MBS",
        "MBSCDS",
    ],
    "M_CSH_CASH": [
        "CRT_LD",
        "CRT_LD_JNR",
        "CRT_SD",
        "MTG_FP",
        "MTG_LMG",
    ],
    "MACCDSCSH": ["HEDGE_MAC", "CASH_BASIS"],
    "CVECSH": ["ITRXCURVE", "IGCURVE", "HYCURVE", "XCURVE"],
    "CLOCDSCSH": ["HEDGE_CLO", "CLO_BB20"],
}

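# Inverse lookup: trading strategy folder -> its cash strategy (used for the offset leg).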
STRATEGY_CASH_MAPPING = {e: k for k, v in CASH_STRATEGY_MAPPING.items() for e in v}

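# CLI entry point: generate the wire file for a given work date (defaults to the previous business day).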
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "workdate",
        nargs="?",
        type=datetime.date.fromisoformat,
        default=prev_business_day(datetime.date.today()),
        help="working date",
    )
    args = parser.parse_args()

    conn = dbconn("dawndb")
    generate_csv(args.workdate, conn)