Diffstat (limited to 'python/bowdst_monthend_recon.py')
-rw-r--r--  python/bowdst_monthend_recon.py  243
1 file changed, 243 insertions(+), 0 deletions(-)
diff --git a/python/bowdst_monthend_recon.py b/python/bowdst_monthend_recon.py
new file mode 100644
index 00000000..ec966f72
--- /dev/null
+++ b/python/bowdst_monthend_recon.py
@@ -0,0 +1,243 @@
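+"""Month-end reconciliation for the BOWDST account.
+
+Loads the administrator's Asset Detail report into bowdst_val, then compares
+database notionals and market values against the administrator's figures for
+bonds, tranches, index CDS and swaptions, and emails the results as CSVs.
+"""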
+import argparse
+import datetime
+from io import StringIO
+
+import numpy as np
+import pandas as pd
+import pandas_market_calendars as mcal
+from exchangelib import FileAttachment
+from pandas.tseries.offsets import BDay
+
+from serenitas.utils.db import dbconn, dawn_engine
+from serenitas.utils.env import DAILY_DIR
+from serenitas.utils.exchange import ExchangeMessage
+
+
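+# Directory holding the month's reconciliation inputs, e.g. DAILY_DIR/BOWD_recon/2023_06.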
+def get_dir(date):
+ p = DAILY_DIR / "BOWD_recon" / f"{date:%Y_%m}"
+ return p
+
+
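+# Remove any previously loaded rows for this as-of date so the report load is idempotent.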
+def clear_date(date, conn):
+ with conn.cursor() as c:
+ c.execute("DELETE FROM bowdst_val bv WHERE as_of_date = %s;", (date,))
+ conn.commit()
+
+
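+# Read the administrator's Asset_Detail.csv, drop unused columns, normalise
+# column names and number formats, and replace that date's rows in bowdst_val.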
+def load_val_report(date, conn):
+    p = get_dir(date) / "Asset_Detail.csv"
+ df = pd.read_csv(
+ p, thousands=",", parse_dates=["As Of Date", "Maturity Date", "Report Run Date"]
+ )
+ df = df.drop(
+ [
+ "Reporting Account Number",
+ "Reporting Account Name",
+ "Source Account Name",
+ "Xref Security ID",
+ "Country Name",
+ "Country Code",
+ "Local Currency Name",
+ "Acct Base Currency Name",
+ "Acct Base Currency Code",
+ "CINS",
+ "Issuer ID",
+ "SEDOL",
+ "Valoren",
+ "Sicovam",
+ "WPK",
+ "Quick",
+ "Underlying Sec ID",
+ "Loan ID",
+ "Manager",
+ "Book Yield Value",
+ "Counterparty",
+ "Ticker with Exchange Code",
+ "Ticker with Yellow Key",
+ "Accounting Status",
+ "Primary GSP Account",
+ "Extended GSP Account Number",
+ "Percent Of Total",
+ ],
+ axis=1,
+ )
+ if "Acctg Status Update (EDT)" in df:
+ del df["Acctg Status Update (EDT)"]
+ elif "Acctg Status Update (EST)" in df:
+ del df["Acctg Status Update (EST)"]
+ df["Source Account Number"] = df["Source Account Number"].str[-4:].astype("int")
+ df.columns = df.columns.str.replace(" ", "_").str.lower()
+ df = df.rename(
+ columns={
+ "shares/par": "current_notional",
+ "local_unrealized_gain/loss": "local_unrealized_pnl",
+ "base_unrealized_gain/loss": "base_unrealized_pnl",
+ }
+ )
+ for col in [
+ "current_notional",
+ "local_price",
+ "base_price",
+ "local_cost",
+ "base_cost",
+ "local_market_value",
+ "base_market_value",
+ "local_unrealized_pnl",
+ "base_unrealized_pnl",
+ "local_notional_cost",
+ "base_notional_cost",
+ "local_notional_value",
+ "base_notional_value",
+ ]:
+ if df[col].dtype != "float64":
+ df[col] = df[col].apply(lambda s: "-" + s[1:-1] if s.startswith("(") else s)
+ df[col] = pd.to_numeric(df[col].str.replace(",", ""))
+ df["row"] = df.index
+ clear_date(date, conn)
+ df.to_sql("bowdst_val", dawn_engine, if_exists="append", index=False)
+
+
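+# Add db-minus-admin difference columns for whichever of market value and
+# notional are present in the frame.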
+def difference(df):
+ if ("db_mv" in df.columns) and ("db_notional" in df.columns):
+ df["mv_difference"] = df["db_mv"] - df["admin_mv"]
+ df["notional_difference"] = df["db_notional"] - df["admin_notional"]
+ elif "db_mv" in df.columns:
+ df["mv_difference"] = df["db_mv"] - df["admin_mv"]
+ elif "db_notional" in df.columns:
+ df["notional_difference"] = df["db_notional"] - df["admin_notional"]
+ return df
+
+
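+# Column totals for the db/admin pairs present; used to build the overview sheet.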
+def sums(df):
+ if ("db_mv" in df.columns) and ("db_notional" in df.columns):
+ return df[["db_mv", "admin_mv", "db_notional", "admin_notional"]].sum()
+ elif "db_mv" in df.columns:
+ return df[["db_mv", "admin_mv"]].sum()
+ elif "db_notional" in df.columns:
+ return df[["db_notional", "admin_notional"]].sum()
+
+
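+# Reconcile database positions against the administrator's hierarchy spreadsheet
+# and the loaded Asset Detail data, then email per-asset CSVs plus an overview.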
+def recon(hierarchy_file, date):
+ df = pd.read_excel(hierarchy_file)
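+    # Bond positions (Subprime, CRT, CLO): merge the admin hierarchy rows onto
+    # risk_positions output by Mellon security id, keeping both sides' notionals
+    # and market values.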
+ bowd_bond_trades = df[df["CUSIP"].notnull()]
+ bond_asset_classes = ["Subprime", "CRT", "CLO"]
+ bond_trades_combined = []
+ for asset in bond_asset_classes:
+ db_bond_trades = pd.read_sql_query(
+ f"select * from risk_positions(%s, %s, 'BOWDST')",
+ dawndb,
+ params=(date, asset),
+ )
+
+ bond_trades = bowd_bond_trades.merge(
+ db_bond_trades,
+ left_on="Mellon Security ID",
+ right_on="identifier",
+ how="right",
+ )[
+ [
+ "description",
+ "identifier",
+ "notional",
+ "factor",
+ "Shares/Par",
+ "Base Market Value",
+ "usd_market_value",
+ ]
+ ]
+ bond_trades["db_notional"] = bond_trades["notional"] * bond_trades["factor"]
+ bond_trades.rename(
+ columns={
+ "usd_market_value": "db_mv",
+ "Shares/Par": "admin_notional",
+ "Base Market Value": "admin_mv",
+ },
+ inplace=True,
+ )
+ bond_trades_combined.append(bond_trades)
+ bond_trades_combined = pd.concat(bond_trades_combined)
+
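+    # Tranche positions: tranche_risk_bowdst already carries both internal and
+    # admin notionals/NAVs, keyed to the last business day on or before the date.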
+ tranche_trades = pd.read_sql_query(
+ f"select security_desc, maturity, orig_attach, orig_detach, sum(notional * tranche_factor) as db_notional, sum(admin_notional) as admin_notional, sum(serenitas_clean_nav) as db_mv, sum(admin_clean_nav) as admin_mv from tranche_risk_bowdst where date=%s group by security_desc, maturity, orig_attach, orig_detach ;",
+ dawndb,
+ params=(last_bus_day(date),),
+ )
+
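+    # Index CDS positions: compare internal marks against the administrator
+    # (GlobeOp) notionals and NAVs from list_cds_marks.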
+ cdx_trades = pd.read_sql_query(
+ f"select security_id, security_desc, index, series, version, maturity, globeop_notional as admin_notional, notional * factor as db_notional, clean_nav as db_mv, globeop_nav as admin_mv from list_cds_marks(%s, null, 'BOWDST')",
+ dawndb,
+ params=(date,),
+ )
+
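+    # CDX swaptions, aggregated by contract terms.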
+ cdx_swaption_trades = pd.read_sql_query(
+ f"select security_id, option_type, strike, expiration_date, sum(serenitas_nav) as db_mv, sum(globeop_nav) as admin_mv from list_swaption_positions_and_risks(%s, 'BOWDST') group by security_id, option_type, strike, expiration_date;",
+ dawndb,
+ params=(date,),
+ )
+
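+    # IR swaptions: join list_ir_swaption_positions to the loaded bowdst_val
+    # rows on deal_id = link_ref to pick up the admin notional and market value.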
+ ir_swaption_trades = pd.read_sql_query(
+ "SELECT deal_id, option_type, strike, SECURITY_Id, expiration_date, notional AS db_notional, current_notional AS admin_notional, nav AS db_mv, base_market_value AS admin_mv FROM list_ir_swaption_positions(%s, 'BOWDST') LEFT JOIN bowdst_val ON deal_id=link_ref WHERE as_of_date=%s;",
+ dawndb,
+ params=(last_bus_day(date), date),
+ )
+
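+    # One differences CSV per asset class plus an overview of totals, sent as
+    # email attachments.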
+ kinds = [
+ bond_trades_combined,
+ tranche_trades,
+ cdx_trades,
+ cdx_swaption_trades,
+ ir_swaption_trades,
+ ]
+ names = [
+ "bond_trades",
+ "tranche_trades",
+ "cdx_trades",
+ "cdx_swaption_trades",
+ "ir_swaption_trades",
+ ]
+ overview = []
+ em = ExchangeMessage()
+ attachments = []
+ for kind, name in zip(kinds, names):
+ buf = StringIO()
+ difference(kind.round(decimals=0).fillna(0)).to_csv(buf)
+ attachments.append(
+ FileAttachment(name=f"{name}_{date}.csv", content=buf.getvalue().encode())
+ )
+ pd.set_option("display.float_format", lambda x: "%.2f" % x)
+        df = pd.DataFrame(sums(kind), columns=["sums"])
+        df["name"] = name
+        df = df.set_index("name")
+ overview.append(df)
+ buf = StringIO()
+ pd.concat(overview).round(decimals=0).to_csv(buf)
+ attachments.append(
+ FileAttachment(name=f"overview.csv", content=buf.getvalue().encode())
+ )
+ em.send_email(
+ subject=f"Notional Totals {date}",
+ body="See attached",
+ to_recipients=("fyu@lmcg.com",),
+ attach=attachments,
+ )
+
+
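+# Previous NYSE business day if the date falls on a weekend or exchange holiday,
+# otherwise the date itself.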
+def last_bus_day(date):
+    holidays = mcal.get_calendar("NYSE").holidays().holidays
+    if date in holidays or not np.is_busday(date):
+        return (date - BDay(1)).date()
+    return date
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("end_date", type=datetime.date.fromisoformat)
+    args = parser.parse_args()
+
+    dawndb = dbconn("dawndb")
+    date = args.end_date
+    load_val_report(date, dawndb)
+    hierarchy_file = get_dir(date) / "hierarchy.xls"
+    recon(hierarchy_file, date)