"""Generate the daily GlobeOp wire upload for the BOWDST fund.

Sums the non-zero USD/EUR cash balances per portfolio/strategy from
valuation_reports, records each reallocation in strat_cash_realloc, writes
the resulting CashFlowDeal rows to a CSV in the daily directory, and uploads
the file to the GlobeOp SFTP site.
"""
import csv
import datetime
from io import StringIO

from collateral.common import CASH_STRATEGY_MAPPING, STRATEGY_CASH_MAPPING
from csv_headers.globeop_upload import HEADERS
from process_queue import get_filepath
from serenitas.analytics.dates import prev_business_day
from serenitas.utils.db import dbconn
from serenitas.utils.env import DAILY_DIR
from serenitas.utils.misc import rename_keys
from serenitas.utils.remote import SftpClient

def process_upload(trades):
    # Serialize the trades to CSV in memory using the GlobeOp wire headers.
    buf = StringIO()
    csvwriter = csv.writer(buf)
    headers = HEADERS["wire"]
    csvwriter.writerow(headers)
    csvwriter.writerows(trades)
    payload = buf.getvalue().encode()
    # Archive a copy in the daily directory, then upload it to GlobeOp.
    dest = get_filepath(DAILY_DIR, "wire", "BOWDST")
    dest.parent.mkdir(exist_ok=True)
    dest.write_bytes(payload)
    sftp = SftpClient.from_creds("hm_globeop")
    sftp.client.chdir("incoming")
    sftp.put(payload, dest.name)

def generate_csv(date, conn):
    headers = HEADERS["wire"]
    with conn.cursor() as c, conn.cursor() as d:
        # Non-zero USD/EUR cash balances per portfolio/strategy/account as of
        # the period end date, joined with custodian details and the "dirty"
        # folder name expected on the GlobeOp side.
        c.execute(
            "SELECT vr.*, accounts2.custodian, accounts2.cp_code, "
            "pfm.folder AS dirty_strat "
            "FROM (SELECT sum(endqty) AS endqty, port, strat, custacctname, invid "
            "FROM valuation_reports v "
            "WHERE periodenddate = %s AND fund = 'BOWDST' "
            "AND invid IN ('USD', 'EUR') "
            "AND port NOT IN ('GFS_HELPER_BUSINESS_UNIT', 'CASH') "
            "GROUP BY (port, strat, custacctname, invid) "
            "HAVING sum(endqty) != 0) vr "
            "LEFT JOIN accounts2 ON custacctname = cash_account "
            "LEFT JOIN portfolio_folder_mapping pfm "
            "ON vr.strat::text = pfm.clean_folder "
            "WHERE pfm.folder IS NOT NULL;",
            (date,),
        )
        trades = []
        for row in c:
            # Strategies that already map to a cash strategy need no transfer.
            if row.strat not in CASH_STRATEGY_MAPPING:
                # Book the reallocation and get back its deal id.
                d.execute(
                    "INSERT INTO strat_cash_realloc (portfolio, folder, "
                    "trade_date, amount, currency, fund, cash_account) "
                    "VALUES (%s, %s, %s, %s, %s, %s, %s) RETURNING dealid",
                    (
                        row.port,
                        row.strat,
                        date,
                        row.endqty,
                        row.invid,
                        "BOWDST",
                        row.custacctname,
                    ),
                )
                (dealid,) = d.fetchone()
                obj = row._asdict()
                rename_keys(
                    obj,
                    {
                        "invid": "Currency",
                        "custacctname": "Cash Account",
                        "custodian": "Custodian",
                        "cp_code": "Counterparty",
                        "dirty_strat": "Folder",
                    },
                )
                data = {
                    "Deal Type": "CashFlowDeal",
                    "Deal Id": dealid,
                    "Action": "NEW",
                    "Client": "HEDGEMARK",
                    "Fund": "BOS_PAT_BOWDOIN",
                    "State": "Valid",
                    "Trade Date": date,
                    "Settlement Date": date,
                    "Transaction Type": "Transfer",
                    "Instrument Type": "Cashflow",
                    "Amount": -obj["endqty"],
                }
                obj.update(data)
                # Create the offsetting second leg booked to the cash folder.
                offset = obj.copy()
                # f-string works whether dealid comes back as text or integer.
                offset["Deal Id"] = f"{obj['Deal Id']}_O"
                offset["Amount"] = -obj["Amount"]
                offset["Folder"] = STRATEGY_CASH_MAPPING[obj["Folder"]]
                trades.append([obj.get(h) for h in headers])
                trades.append([offset.get(h) for h in headers])
    conn.commit()
    if trades:
        process_upload(trades)
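
# Usage sketch. The filename "wire_upload.py" is illustrative, and this
# assumes the "dawndb" database and "hm_globeop" SFTP credentials are
# configured in the environment:
#
#   python wire_upload.py              # defaults to the previous business day
#   python wire_upload.py 2024-06-28   # explicit ISO-format working date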
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "workdate",
        nargs="?",
        type=datetime.date.fromisoformat,
        default=prev_business_day(datetime.date.today()),
        help="working date",
    )
    args = parser.parse_args()
    conn = dbconn("dawndb")
    generate_csv(args.workdate, conn)