aboutsummaryrefslogtreecommitdiffstats
path: root/python/monthend_recon_bowdst.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/monthend_recon_bowdst.py')
-rw-r--r--python/monthend_recon_bowdst.py255
1 files changed, 0 insertions, 255 deletions
diff --git a/python/monthend_recon_bowdst.py b/python/monthend_recon_bowdst.py
deleted file mode 100644
index a4d1919f..00000000
--- a/python/monthend_recon_bowdst.py
+++ /dev/null
@@ -1,255 +0,0 @@
-import pandas as pd
-from serenitas.utils.db import dbconn
-import argparse
-from serenitas.utils.exchange import ExchangeMessage
-from exchangelib import FileAttachment
-from io import StringIO
-from serenitas.utils.env import DAILY_DIR
-from serenitas.utils.db import dbconn, dawn_engine
-import datetime
-from pandas.tseries.offsets import BDay
-import numpy as np
-from dateutil.relativedelta import relativedelta
-from serenitas.analytics.dates import prev_business_day
-
-
def get_dir(date):
    """Directory holding the BOWD recon reports for *date*'s month."""
    return DAILY_DIR / "BOWD_recon" / f"{date:%Y_%m}"
-
-
def clear_date(date, conn):
    """Delete any bowdst_val rows already loaded for *date*, then commit.

    Makes a subsequent load of the valuation report idempotent.
    """
    cursor = conn.cursor()
    try:
        cursor.execute("DELETE FROM bowdst_val bv WHERE as_of_date = %s;", (date,))
    finally:
        cursor.close()
    conn.commit()
-
-
def load_val_report(date, conn):
    """Load the custodian's "Asset Detail" month-end valuation CSV for *date*
    into the bowdst_val table, replacing any rows previously loaded for that
    date.

    Reads <DAILY_DIR>/BOWD_recon/<YYYY_MM>/Asset Detail.csv, drops the
    identifier/bookkeeping columns the recon does not use, normalizes column
    names and numeric formats to the DB schema, and bulk-inserts via
    dawn_engine.
    """
    p = get_dir(date) / f"Asset Detail.csv"
    df = pd.read_csv(
        p, thousands=",", parse_dates=["As Of Date", "Maturity Date", "Report Run Date"]
    )
    # Columns present in the report but not reconciled; a missing column here
    # raises, which flags a report-format change early.
    df = df.drop(
        [
            "Reporting Account Number",
            "Reporting Account Name",
            "Source Account Name",
            "Xref Security ID",
            "Country Name",
            "Country Code",
            "Local Currency Name",
            "Acct Base Currency Name",
            "Acct Base Currency Code",
            "CINS",
            "Issuer ID",
            "SEDOL",
            "Valoren",
            "Sicovam",
            "WPK",
            "Quick",
            "Underlying Sec ID",
            "Loan ID",
            "Manager",
            "Book Yield Value",
            "Counterparty",
            "Ticker with Exchange Code",
            "Ticker with Yellow Key",
            "Accounting Status",
            "Primary GSP Account",
            "Extended GSP Account Number",
            "Percent Of Total",
        ],
        axis=1,
    )
    # The header's timezone label flips with daylight saving; drop whichever
    # variant this month's report carries.
    if "Acctg Status Update (EDT)" in df:
        del df["Acctg Status Update (EDT)"]
    elif "Acctg Status Update (EST)" in df:
        del df["Acctg Status Update (EST)"]
    # Keep only the trailing 4 digits of the account number, as an int.
    df["Source Account Number"] = df["Source Account Number"].str[-4:].astype("int")
    # snake_case the headers to match the DB column names.
    df.columns = df.columns.str.replace(" ", "_").str.lower()
    df = df.rename(
        columns={
            "shares/par": "current_notional",
            "local_unrealized_gain/loss": "local_unrealized_pnl",
            "base_unrealized_gain/loss": "base_unrealized_pnl",
        }
    )
    # Numeric columns sometimes arrive as strings with accounting-style
    # "(...)" negatives and thousands separators; coerce those to numbers.
    # (Columns already parsed as float64 are left untouched.)
    for col in [
        "current_notional",
        "local_price",
        "base_price",
        "local_cost",
        "base_cost",
        "local_market_value",
        "base_market_value",
        "local_unrealized_pnl",
        "base_unrealized_pnl",
        "local_notional_cost",
        "base_notional_cost",
        "local_notional_value",
        "base_notional_value",
    ]:
        if df[col].dtype != "float64":
            df[col] = df[col].apply(lambda s: "-" + s[1:-1] if s.startswith("(") else s)
            df[col] = pd.to_numeric(df[col].str.replace(",", ""))
    # Preserve the original report row order for traceability.
    df["row"] = df.index
    clear_date(date, conn)
    df.to_sql("bowdst_val", dawn_engine, if_exists="append", index=False)
-
-
def difference(df):
    """Append db-minus-admin difference columns to *df* in place.

    Adds ``mv_difference`` when ``db_mv`` is present and
    ``notional_difference`` when ``db_notional`` is present; returns *df*.
    """
    if "db_mv" in df.columns:
        df["mv_difference"] = df["db_mv"] - df["admin_mv"]
    if "db_notional" in df.columns:
        df["notional_difference"] = df["db_notional"] - df["admin_notional"]
    return df
-
-
def sums(df):
    """Column totals for whichever db/admin value pairs *df* carries.

    Returns a Series summing (db_mv, admin_mv) and/or
    (db_notional, admin_notional), or None when neither db column exists.
    """
    cols = []
    if "db_mv" in df.columns:
        cols += ["db_mv", "admin_mv"]
    if "db_notional" in df.columns:
        cols += ["db_notional", "admin_notional"]
    return df[cols].sum() if cols else None
-
-
def recon(date, conn):
    """Month-end reconciliation for BOWDST.

    Compares positions from our DB risk functions against the admin
    valuations loaded into bowdst_val for *date*, broken out by product
    (bonds per asset class, tranches, CDX, CDX swaptions, IR swaptions),
    then emails one detail CSV per product plus an overview of column
    totals.
    """
    df = pd.read_sql_query(
        "SELECT * FROM bowdst_val where as_of_date=%s", conn, params=(date,)
    )
    # Bond rows in the admin file carry a cusip; the other products are
    # pulled by their own queries below.
    bowd_bond_trades = df[df["cusip"].notnull()]
    bond_asset_classes = ["Subprime", "CRT", "CLO"]
    bond_trades_combined = []
    for asset in bond_asset_classes:
        db_bond_trades = pd.read_sql_query(
            "select * from risk_positions(%s, %s, 'BOWDST')",
            conn,
            params=(date, asset),
        )
        # Right join: keep every DB position even when the admin file has
        # no matching row.
        bond_trades = bowd_bond_trades.merge(
            db_bond_trades,
            left_on="mellon_security_id",
            right_on="identifier",
            how="right",
        )
        # "identifier" was previously listed twice here, which duplicated
        # the column in the detail CSV; it is selected once now.
        bond_trades = bond_trades[
            [
                "description",
                "identifier",
                "notional",
                "factor",
                "current_notional",
                "base_market_value",
                "usd_market_value",
            ]
        ]
        bond_trades["db_notional"] = bond_trades["notional"] * bond_trades["factor"]
        bond_trades.rename(
            columns={
                "usd_market_value": "db_mv",
                "current_notional": "admin_notional",
                "base_market_value": "admin_mv",
            },
            inplace=True,
        )
        bond_trades_combined.append(bond_trades)
    bond_trades_combined = pd.concat(bond_trades_combined)

    tranche_trades = pd.read_sql_query(
        "select security_desc, maturity, orig_attach, orig_detach, sum(notional * tranche_factor) as db_notional, sum(admin_notional) as admin_notional, sum(serenitas_clean_nav) as db_mv, sum(admin_clean_nav) as admin_mv from tranche_risk_bowdst where date=%s group by security_desc, maturity, orig_attach, orig_detach ;",
        conn,
        params=(date,),
    )

    cdx_trades = pd.read_sql_query(
        "select security_id, security_desc, index, series, version, maturity, globeop_notional as admin_notional, notional * factor as db_notional, clean_nav as db_mv, globeop_nav as admin_mv from list_cds_marks_legacy(%s, null, 'BOWDST')",
        conn,
        params=(date,),
    )

    cdx_swaption_trades = pd.read_sql_query(
        "select security_id, option_type, strike, expiration_date, sum(serenitas_nav) as db_mv, sum(globeop_nav) as admin_mv from list_swaption_positions_and_risks(%s, 'BOWDST') group by security_id, option_type, strike, expiration_date;",
        conn,
        params=(date,),
    )

    # NOTE(review): IR swaption positions are pulled as of the previous
    # business day while admin rows use *date* — presumably the marks lag
    # by one day; confirm with the upstream process.
    ir_swaption_trades = pd.read_sql_query(
        "SELECT deal_id, option_type, strike, SECURITY_Id, expiration_date, notional AS db_notional, current_notional AS admin_notional, nav AS db_mv, base_market_value AS admin_mv FROM list_ir_swaption_positions(%s, 'BOWDST') LEFT JOIN bowdst_val ON deal_id=link_ref WHERE as_of_date=%s;",
        conn,
        params=(prev_business_day(date), date),
    )

    kinds = [
        bond_trades_combined,
        tranche_trades,
        cdx_trades,
        cdx_swaption_trades,
        ir_swaption_trades,
    ]
    names = [
        "bond_trades",
        "tranche_trades",
        "cdx_trades",
        "cdx_swaption_trades",
        "ir_swaption_trades",
    ]
    # Global pandas option: set once, instead of on every loop iteration.
    pd.set_option("display.float_format", lambda x: "%.2f" % x)
    overview = []
    em = ExchangeMessage()
    attachments = []
    for kind, name in zip(kinds, names):
        buf = StringIO()
        difference(kind.round(decimals=0).fillna(0)).to_csv(buf)
        attachments.append(
            FileAttachment(name=f"{name}_{date}.csv", content=buf.getvalue().encode())
        )
        summary = pd.DataFrame(sums(kind), columns=["sums"])
        summary["name"] = name
        # (A discarded `df.set_index("name")` no-op used to sit here; "name"
        # deliberately remains a regular column so the metric labels stay in
        # the index of the overview CSV.)
        overview.append(summary)
    buf = StringIO()
    pd.concat(overview).round(decimals=0).to_csv(buf)
    attachments.append(
        FileAttachment(name="overview.csv", content=buf.getvalue().encode())
    )
    em.send_email(
        subject=f"Notional Totals {date}",
        body="See attached",
        to_recipients=("fyu@lmcg.com",),
        attach=attachments,
    )
-
-
def download_reports(date: datetime.date):
    """Fetch the BNY month-end recon emails and save their attachments into
    the month's recon directory.

    Reports for *date*'s month-end arrive during the following month, so
    messages are matched on the month of ``date + 1 month``.
    """
    target_directory = get_dir(date)
    # parents/exist_ok: tolerate a fresh month directory tree and reruns.
    target_directory.mkdir(parents=True, exist_ok=True)

    em = ExchangeMessage()

    # Uses the module-level `relativedelta` import; the old per-message
    # `from dateutil import relativedelta` shadowed it and re-ran on every
    # loop iteration.
    expected_month = (date + relativedelta(months=1)).month
    for msg in em.get_msgs(path=["Month End Recon", "BNY"]):
        if msg.datetime_received.month == expected_month:
            for attachment in msg.attachments:
                (target_directory / attachment.name).write_bytes(attachment.content)
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument("end_date", type=datetime.date.fromisoformat)
- args = parser.parse_args()
- # download_reports(args.end_date)
- dawndb = dbconn("dawndb")
- # load_val_report(args.end_date, dawndb)
- recon(args.end_date, dawndb)