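"""Generate month-end position files for the Bowdoin Street (BOWDST) fund.

One CSV is produced per product type (bond, future, OTC), archived under
DAILY_DIR and, unless --no-upload is passed, uploaded to the HedgeMark/GlobeOp
SFTP and emailed to operations."""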
import csv
import datetime
from io import StringIO

from csv_headers.globeop_upload import POSITION_HEADERS
from serenitas.analytics.dates import prev_business_day
from serenitas.utils.env import DAILY_DIR
from serenitas.utils.exchange import ExchangeMessage, FileAttachment
from serenitas.utils.misc import rename_keys
from serenitas.utils.remote import SftpClient
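
# SQL used to pull OTC positions for the BOWDST fund, keyed by product type;
# each query takes the COB date as its only parameter.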
_otc_queries = {
"Tranche": "SELECT trb.trade_id, trb.serenitas_clean_nav + trb.serenitas_accrued as mtm, trb.notional * trb.tranche_factor as active_notional, cds.*, COALESCE(nextredindexcode, security_id) AS redindexcode FROM tranche_risk_bowdst trb left join cds on trade_id=id LEFT JOIN index_version_markit ivm ON security_id=redindexcode WHERE date=%s;",
"CDXSwaption": "SELECT abs(spr.notional) AS active_notional, spr.serenitas_nav as nav, swaptions.*, index_version_markit.annexdate FROM list_swaption_positions_and_risks(%s, 'BOWDST') spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
"IRSwaption": "SELECT abs(spr.notional) AS active_notional, spr.nav, swaptions.*, index_version_markit.effectivedate FROM list_ir_swaption_positions(%s, 'BOWDST') spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
"CDX": "SELECT cds.*, ivm.effectivedate FROM list_cds_marks(%s, null, 'BOWDST') cds LEFT JOIN index_version_markit ivm ON security_id=redindexcode;",
}

def build_line(obj, asset_type):
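    """Return the values of ``obj`` in the column order of ``POSITION_HEADERS[asset_type]``."""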
return [obj.get(h, None) for h in POSITION_HEADERS[asset_type]]

def process_upload(positions, date, upload):
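    """Write one CSV per asset type and archive it under DAILY_DIR; when
    ``upload`` is true, also push each file to the GlobeOp SFTP and email
    the set to operations."""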
attachments = []
if upload:
sftp = SftpClient.from_creds("hm_globeop", folder="incoming")
for asset_type, trades in positions.items():
buf = StringIO()
csvwriter = csv.writer(buf)
csvwriter.writerow(POSITION_HEADERS[asset_type])
csvwriter.writerows(build_line(trade, asset_type) for trade in trades)
buf = buf.getvalue().encode()
timestamp = datetime.datetime.now()
fname = f"HEDGEMARK.POSITION.BOS_PAT_BOWDOIN.{timestamp:%Y%m%d.%H%M%S}.{asset_type.capitalize()}Deal.PositionsAsOf{args.date}.csv"
if upload:
sftp.put(buf, fname)
base_dir = DAILY_DIR / f"{timestamp:%Y-%m-%d}"
base_dir.mkdir(exist_ok=True, parents=True)
dest = base_dir / fname
dest.write_bytes(buf)
attachments.append(FileAttachment(name=fname, content=buf))
if upload:
em = ExchangeMessage()
recipients = (
"hm-operations@bnymellon.com",
"hedgemark.lmcg.ops@sscinc.com",
"Hedgemark.OTC@sscinc.com",
"catherine.porter@bnymellon.com",
"shkumar@sscinc.com",
)
cc_recipients = ("bowdoin-ops@lmcg.com",)
subject = f"Position_files for Bowdoin Street as of {args.date}"
body = f"Please see monthend positions for Bowdoin Street as of {args.date}. They have been uploaded to the SFTP as well."
em.send_email(
subject, body, recipients, cc_recipients=cc_recipients, attach=attachments
)

def positions_bond(conn, date):
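    """Yield bond positions for the BOWDST fund as of ``date``, mapped to the GlobeOp upload columns."""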
with conn.cursor() as c:
c.execute("SELECT * FROM risk_positions(%s, null, 'BOWDST') ", (date,))
for row in c:
obj = row._asdict()
rename_keys(
obj,
{
"identifier": "CUSIP",
"description": "SecurityDescription",
"notional": "Position",
"price": "MarketPrice",
"local_market_value": "Local Market Value",
"usd_market_value": "Base Market Value",
},
)
try:
obj["Fx Rate"] = obj["Local Market Value"] / obj["Base Market Value"]
except ZeroDivisionError:
obj["Fx Rate"] = 1
obj["AccountNumber"] = "319478"
obj["Prime Broker"] = "BONY"
obj["COB Date"] = date
obj["Currency"] = "USD"
obj["SecurityType"] = "Bond"
obj["CurrentFace"] = obj["Position"] * obj["factor"]
yield obj

def positions_future(conn, date):
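    """Yield net futures positions for the BOWDST fund as of ``date``."""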
with conn.cursor() as c:
c.execute(
"WITH tmp AS (SELECT bbg_ticker, fund, security_desc, currency, maturity, sum(quantity * (2*buysell::int-1)) OVER (PARTITION BY bbg_ticker, fund, security_desc, currency, maturity) notional FROM futures "
"WHERE fund='BOWDST' AND trade_date <= %s) "
"SELECT bbg_ticker, notional, code AS cp_code, cash_account, security_desc, currency, maturity FROM tmp LEFT JOIN accounts USING (fund) WHERE tmp.notional != 0 AND account_type='Future';",
(date,),
)
for row in c:
obj = row._asdict()
rename_keys(
obj,
{
"bbg_ticker": "BBGTicker",
"notional": "Quantity",
"cp_code": "Prime Broker",
"cash_account": "AccountNumber",
"security_desc": "SecurityDescription",
"currency": "Currency",
"maturity": "MaturityDate",
},
)
obj["COB Date"] = date
obj["SecurityType"] = "Futures"
yield obj

def _otc_serialize(obj, product_type, date):
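    """Map a raw OTC position row to the GlobeOp upload columns for ``product_type``."""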
rename_keys(
obj,
{
"dealid": "Unique Deal ID",
"cp_code": "Counterparty",
"currency": "DealCurrencyA",
"active_notional": "NotionalA",
"fixed_rate": "FixedRate",
"trade_date": "Start Date",
"effective_date": "EffectiveDate",
"maturity": "Maturity Date",
"security_id": "Underlying (ISIN / CUSP / RED CODES)",
"security_desc": "Underlying Desc",
"mtm": "MTM Valuation",
"strike": "Strike",
"annexdate": "EffectiveDate",
"expiration_date": "Underlying Maturity",
"nav": "MTM Valuation",
},
)
data = {
"Client Name": "HEDGEMARK",
"Fund Name": "BOS_PAT_BOWDOIN",
"Product Type": "Credit Index Tranche",
"MTM Currency": "USD",
"COB Date": date,
}
obj.update(data)
if product_type == "Tranche":
obj["Underlying (ISIN / CUSP / RED CODES)"] = obj["redindexcode"]
obj["Product Type"] = "Credit Index Tranche"
obj["TransactionIndicator (Buy/Sell)"] = (
"B" if obj["protection"] == "Buyer" else "S"
)
elif product_type in ("CDXSwaption", "IRSwaption"):
obj["Product Type"] = (
"CD Swaption" if product_type == "CDXSwaption" else "Swaption"
)
obj["TransactionIndicator (Buy/Sell)"] = "B" if obj["buysell"] else "S"
obj["PutCall Indicator (Call/Put)"] = (
"P" if obj["option_type"] == "PAYER" else "C"
)
obj["Exercise Type"] = "European"
elif product_type == "CDX":
obj["Product Type"] = "Credit Index"
obj["Counterparty"] = "GS"
obj["Unique Deal ID"] = obj[
"Underlying (ISIN / CUSP / RED CODES)"
] # Different from rest, we will override
obj["TransactionIndicator (Buy/Sell)"] = "B" if obj["notional"] > 0 else "S"
obj["DealCurrencyA"] = "EUR" if obj["index"] in ("EU", "XO") else "USD"
obj["NotionalA"] = abs(obj["notional"]) * obj["factor"]
obj["Start Date"] = date
obj["MTM Valuation"] = obj["clean_nav"] + obj["accrued"]
obj["Clearing House Name"] = "ICE"
obj["FixedRate"] = obj["coupon"] * 100
obj["Effective Date"] = obj["effectivedate"]
return obj

def positions_otc(conn, date):
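    """Yield OTC positions (tranches, swaptions and index CDS) for the BOWDST fund as of ``date``."""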
with conn.cursor() as c:
for product_type, sql_query in _otc_queries.items():
c.execute(sql_query, (date,))
for row in c:
yield _otc_serialize(row._asdict(), product_type, date)

if __name__ == "__main__":
import argparse
from serenitas.utils.db import dbconn
parser = argparse.ArgumentParser(
description="Generate position files for Bowdoin Street"
)
    parser.add_argument(
        "date",
        nargs="?",
        type=datetime.date.fromisoformat,
        default=prev_business_day(datetime.date.today().replace(day=1)),
        help="COB date (defaults to the last business day of the previous month)",
    )
parser.add_argument(
"--product",
nargs="+",
choices=["bond", "future", "otc"],
default=["bond", "future", "otc"],
help="list of products to generate position files for",
)
    parser.add_argument(
        "--no-upload",
        "-n",
        action="store_true",
        help="write the position files locally without uploading them to GlobeOp",
    )
args = parser.parse_args()
conn = dbconn("dawndb")
positions = {
p: list(globals()[f"positions_{p}"](conn, args.date)) for p in args.product
}
    # Upload only when the requested COB date is the previous business day,
    # i.e. this is the regular run on the first business day after month-end;
    # with --no-upload the files are always written locally without uploading.
    if args.no_upload or args.date == prev_business_day(datetime.date.today()):
        process_upload(positions, args.date, not args.no_upload)