aboutsummaryrefslogtreecommitdiffstats
path: root/python
diff options
context:
space:
mode:
Diffstat (limited to 'python')
-rw-r--r--python/api_quotes/__init__.py (renamed from python/csv_headers/__init__.py)0
-rw-r--r--python/api_quotes/__main__.py23
-rw-r--r--python/api_quotes/api.py45
-rw-r--r--python/api_quotes/quotes.py66
-rw-r--r--python/bbg_cds_quotes.py1
-rw-r--r--python/bowdst.py348
-rw-r--r--python/cash_reports.py28
-rw-r--r--python/collateral/baml_fcm.py2
-rw-r--r--python/csv_headers/bond_upload.py164
-rw-r--r--python/csv_headers/citco.py338
-rw-r--r--python/csv_headers/globeop_upload.py780
-rw-r--r--python/csv_headers/irs.py136
-rw-r--r--python/csv_headers/mtm_upload.py201
-rw-r--r--python/csv_headers/test.py136
-rw-r--r--python/custodian_wire.py23
-rw-r--r--python/exploration/dispersion.py96
-rw-r--r--python/fcm_fx.py (renamed from python/baml_fcm_fx.py)25
-rw-r--r--python/innocap_file_transfer.py22
-rw-r--r--python/isosel_reports.py26
-rw-r--r--python/load_globeop_report.py3
-rw-r--r--python/manual_bond_upload.py14
-rw-r--r--python/notebooks/dispersion_tranche_model.ipynb198
-rw-r--r--python/pnl_explain.py4
-rw-r--r--python/position_file_bowdst.py254
-rw-r--r--python/position_file_isosel.py227
-rw-r--r--python/report_ops/__main__.py23
-rw-r--r--python/report_ops/custodians.py77
-rw-r--r--python/report_ops/headers.py36
-rw-r--r--python/report_ops/misc.py12
-rw-r--r--python/report_ops/sma.py278
-rw-r--r--python/risk/__main__.py17
-rw-r--r--python/risk/portfolio.py2
-rw-r--r--python/risk/tranches.py83
-rw-r--r--python/strat_cash_realloc.py2
-rw-r--r--python/task_server/__main__.py4
-rw-r--r--python/task_server/globeop.py14
-rw-r--r--python/upload_bbh_bonds.py28
-rw-r--r--python/upload_bbh_trades.py100
-rw-r--r--python/upload_umb_bonds.py22
39 files changed, 814 insertions, 3044 deletions
diff --git a/python/csv_headers/__init__.py b/python/api_quotes/__init__.py
index e69de29b..e69de29b 100644
--- a/python/csv_headers/__init__.py
+++ b/python/api_quotes/__init__.py
diff --git a/python/api_quotes/__main__.py b/python/api_quotes/__main__.py
new file mode 100644
index 00000000..e0cf8708
--- /dev/null
+++ b/python/api_quotes/__main__.py
@@ -0,0 +1,23 @@
+from .api import MarkitAPI
+from .quotes import Quote
+import pandas as pd
+import logging
+
+logger = logging.getLogger(__name__)
+
+if __name__ == "__main__":
+ asset_class = "CD"
+ after = None
+ while True:
+ if data := MarkitAPI.get_data(asset_class, after):
+ for row in data:
+ try:
+ quote = Quote.from_markit_line(row)
+ except ValueError as e:
+                logger.error(f"Couldn't parse {row['quoteid']}: {e}")
+ else:
+ quote.stage()
+ quote.commit()
+ after = f"{row['receiveddatetime']},{asset_class}-9480-{row['quoteid']}"
+ else:
+ break
diff --git a/python/api_quotes/api.py b/python/api_quotes/api.py
new file mode 100644
index 00000000..af479d67
--- /dev/null
+++ b/python/api_quotes/api.py
@@ -0,0 +1,45 @@
+from serenitas.utils.misc import get_credential_path
+import json
+import posixpath
+from urllib.parse import urljoin
+from typing import ClassVar
+import requests
+import pandas as pd
+
+
+def load_api_key():
+ with get_credential_path("markit_api").open() as fh:
+ creds = json.load(fh)
+ base_url = creds.pop("url")
+ r = requests.post(
+ urljoin(base_url, "apikey"),
+ data=creds,
+ )
+ return base_url, r.text
+
+
+def lowercase_keys(d):
+ return {k.lower(): v for k, v in d.items()}
+
+
+class MarkitAPI:
+ base_url, api_key = load_api_key()
+
+ @classmethod
+ def get_data(cls, asset_class, after=None, service="latest"):
+ params = {
+ "format": "json",
+ "assetClass": asset_class,
+ "apikey": cls.api_key,
+ "limit": 1000,
+ "sortBy": "receivedDateTime",
+ "descending": "true",
+ "dateformat": "MILLISECONDSSINCEEPOCH",
+ }
+ if after:
+ params["after"] = after
+ print(params)
+ path = posixpath.join("parsing", "Quote", service)
+ url = urljoin(cls.base_url, path)
+ r = requests.get(url, params)
+ return map(lowercase_keys, json.loads(r.text))
diff --git a/python/api_quotes/quotes.py b/python/api_quotes/quotes.py
new file mode 100644
index 00000000..28ca708b
--- /dev/null
+++ b/python/api_quotes/quotes.py
@@ -0,0 +1,66 @@
+from serenitas.ops.trade_dataclasses import Deal
+from dataclasses import dataclass
+import datetime
+from typing import Literal
+from serenitas.utils.db2 import dbconn
+
+firmness = Literal["FIRM", "INDICATIVE"]
+asset_class = Literal["CD"]
+
+
+def maturity_dt(d):
+ try:
+ return datetime.date(
+ int(d["maturityyear"]), int(d["maturitymonth"]), int(d["maturityday"])
+ )
+ except (
+ ValueError,
+ KeyError,
+ ): # Sometimes maturity isn't included but we still have tenor
+ return
+
+
+@dataclass
+class Quote(Deal, table_name="markit_singlename_quotes", deal_type=None):
+ quoteid: int
+ assetclass: asset_class
+ msg_id: str
+ quotesource: str
+ confidence: int
+ redcode: str = None
+ ticker: str = None
+ maturity: datetime.date = None
+ tenor: int = None
+ runningcoupon: int = None
+ bidconventionalspread: float = None
+ bidupfront: float = None
+ bidsize: float = None
+ askconventionalspread: float = None
+ askupfront: float = None
+ asksize: float = None
+ firmness: firmness = None
+ quotedate: datetime.datetime = None
+
+ @classmethod
+ def from_markit_line(cls, d):
+ additional_attributes = {
+ "maturity": maturity_dt(d),
+ "msg_id": d["message"]["id"],
+ "quotedate": datetime.datetime.fromtimestamp(
+ d["receiveddatetime"] / 1000
+ ).replace(tzinfo=datetime.timezone.utc),
+ "quotesource": d["sourceshortname"],
+ "tenor": f"{d['tenor']}Y",
+ }
+ d.update(additional_attributes)
+ return cls.from_dict(**d)
+
+ @property
+ def message(self):
+ return QuoteDetails.from_tradeid(self.msg_id)
+
+
+Quote.init_dbconn(dbconn("serenitasdb"))
+Quote._sql_insert = Quote._sql_insert.replace(
+ "RETURNING *", "ON CONFLICT (quoteid) DO NOTHING RETURNING *"
+)
diff --git a/python/bbg_cds_quotes.py b/python/bbg_cds_quotes.py
index 3fa451e1..ac96bf79 100644
--- a/python/bbg_cds_quotes.py
+++ b/python/bbg_cds_quotes.py
@@ -31,7 +31,6 @@ def bbg_call(session, securities, fields, r):
r = {}
bbg_call(securities, fields, r)
-breakpoint()
sql_str = f"INSERT INTO cds_quotes VALUES({','.join(['%s'] * 8)})"
diff --git a/python/bowdst.py b/python/bowdst.py
deleted file mode 100644
index c86aea15..00000000
--- a/python/bowdst.py
+++ /dev/null
@@ -1,348 +0,0 @@
-import datetime
-import pandas as pd
-import pathlib
-import warnings
-from exchangelib import FileAttachment
-from io import StringIO
-from typing import Tuple
-from serenitas.analytics.dates import bus_day
-from serenitas.utils.db import dbconn, dawn_engine
-from serenitas.utils.env import DAILY_DIR
-from report_ops.misc import get_dir
-
-
-def download_messages(em):
- bowdst_wire_recent = True
- for msg in em.get_msgs(
- 20,
- path=["BowdoinOps", "Reports"],
- subject__startswith="Document(s) from Reporting",
- ):
- if msg.sender == "notify@bnymellon.com":
- for attach in msg.attachments:
- fname = attach.name
- if fname.endswith("csv"):
- base_name = fname.removesuffix(".csv")
- file_type, date_part = base_name.split("_")
- match file_type:
- case (
- "Asset Detail"
- | "Net Investment Earned Income by Security"
- | "Settled Cash Statement"
- ):
- date = datetime.datetime.strptime(
- date_part, "%d %b %Y"
- ).date()
- case "BowdstWires":
- try:
- date = datetime.datetime.strptime(
- date_part, "%Y%m%d%H%M%S"
- ).date()
- except ValueError:
- date = datetime.datetime.strptime(
- date_part, "%d %b %Y%H%M%S"
- ).date()
- case "Live-cash":
- date = datetime.datetime.strptime(
- date_part, "%Y%m%d%H%M%S"
- ).date()
- case _:
- warnings.warn(f"Unknown report type {file_type}")
- continue
- p = DAILY_DIR / str(date) / "Reports" / fname
- if not p.parent.exists():
- p.parent.mkdir(parents=True, exist_ok=True)
- if not p.exists() or (
- fname.startswith("BowdstWires") and bowdst_wire_recent
- ):
- if fname.startswith("BowdstWires"):
- bowdst_wire_recent = False
- p.write_bytes(attach.content)
-
-
-def load_val_report(workdate: datetime.date):
- p = get_dir(workdate) / f"Asset Detail_{workdate:%d %b %Y}.csv"
- df = pd.read_csv(
- p, thousands=",", parse_dates=["As Of Date", "Maturity Date", "Report Run Date"]
- )
- df = df.drop(
- [
- "Reporting Account Number",
- "Reporting Account Name",
- "Source Account Name",
- "Xref Security ID",
- "Country Name",
- "Country Code",
- "Local Currency Name",
- "Acct Base Currency Name",
- "Acct Base Currency Code",
- "CINS",
- "Issuer ID",
- "SEDOL",
- "Valoren",
- "Sicovam",
- "WPK",
- "Quick",
- "Underlying Sec ID",
- "Loan ID",
- "Manager",
- "Book Yield Value",
- "Counterparty",
- "Ticker with Exchange Code",
- "Ticker with Yellow Key",
- "Accounting Status",
- "Primary GSP Account",
- "Extended GSP Account Number",
- "Percent Of Total",
- ],
- axis=1,
- )
- if "Acctg Status Update (EDT)" in df:
- del df["Acctg Status Update (EDT)"]
- elif "Acctg Status Update (EST)" in df:
- del df["Acctg Status Update (EST)"]
- df["Source Account Number"] = df["Source Account Number"].str[-4:].astype("int")
- df.columns = df.columns.str.replace(" ", "_").str.lower()
- df = df.rename(
- columns={
- "shares/par": "current_notional",
- "local_unrealized_gain/loss": "local_unrealized_pnl",
- "base_unrealized_gain/loss": "base_unrealized_pnl",
- }
- )
- for col in [
- "current_notional",
- "local_price",
- "base_price",
- "local_cost",
- "base_cost",
- "local_market_value",
- "base_market_value",
- "local_unrealized_pnl",
- "base_unrealized_pnl",
- "local_notional_cost",
- "base_notional_cost",
- "local_notional_value",
- "base_notional_value",
- ]:
- if df[col].dtype != "float64":
- df[col] = df[col].apply(lambda s: "-" + s[1:-1] if s.startswith("(") else s)
- df[col] = pd.to_numeric(df[col].str.replace(",", ""))
- df["row"] = df.index
- df.to_sql("bowdst_val", dawn_engine, if_exists="append", index=False)
-
-
-def load_pnl_report(workdate: datetime.date):
- if workdate.weekday() == 0:
- workdate -= datetime.timedelta(days=2)
- p = (
- get_dir(workdate)
- / f"Net Investment Earned Income by Security_{workdate:%d %b %Y}.csv"
- )
- df = pd.read_csv(p, thousands=",", parse_dates=["Begin Date", "End Date"])
- df = df.drop(
- [
- "Reporting Account Number",
- "Reporting Account Name",
- "Reporting Account Base Currency",
- "Accounting Status",
- "Security Cross Reference Type",
- "Security Cross Reference Cusip",
- "Local Currency Description",
- "Country Of Issue",
- "Country Of Issue Description",
- "State Code",
- "Asset Type Code",
- "5500 category code",
- "5500 class code Description",
- "CINS",
- "SEDOL",
- "Valoren",
- "Sicovam",
- "WPK",
- "QUICK",
- "Underlying Sec ID",
- "Loan ID",
- "Counterparty",
- "Source Account Name",
- "Source Account Number",
- "Fair Value Hierarchy - Beginning of Period",
- "Fair Value Override - Beginning of Period",
- "Fair Value Hierarchy - End of Period",
- "Fair Value Override - End of Period",
- "Country of Registration name",
- "Country of Registration code",
- ],
- axis=1,
- )
- df.columns = df.columns.str.replace(" ", "_").str.lower()
- df = df.rename(
- columns={
- "change_in_unrealized_currency_gain_loss": "unrealized_currency",
- "change_in_unrealized_investment_gain_loss": "unrealized_investment",
- "total_change_in_unrealized_gain_loss": "total_unrealized",
- "accretion/amortization": "accretion_amortization",
- "journal_entry_accretion/amortization": "journal_entry_accretion_amortization",
- "realized_gain/loss": "realized_gain_loss",
- "journal_entry_realized_g/l": "journal_entry_realized_gl",
- "manager": "link_ref",
- "realized_gain/loss_investment": "realized_investment",
- "realized_gain/loss_currency": "realized_currency",
- "realized_gain/loss_settled": "realized_settled",
- "realized_gain/loss_traded": "realized_traded",
- }
- )
- for col in [
- "unrealized_currency",
- "unrealized_investment",
- "total_unrealized",
- "ending_shares_par",
- "opening_receivables",
- "closing_receivables",
- "income",
- "journal_entry_income",
- "accretion_amortization",
- "journal_entry_accretion_amortization",
- "realized_gain_loss",
- "journal_entry_realized_gl",
- "realized_loss_impaired_securities",
- "net_investment_income",
- "realized_investment",
- "realized_currency",
- "realized_settled",
- "realized_traded",
- ]:
- if df[col].dtypes == "object":
- df[col] = df[col].apply(lambda s: "-" + s[1:-1] if s.startswith("(") else s)
- df[col] = pd.to_numeric(df[col].str.replace(",", ""))
- df["row"] = df.index
- df.to_sql("bowdst_pnl", dawn_engine, if_exists="append", index=False)
-
-
-def cmp_positions(cob: datetime.date, df_blotter: pd.DataFrame) -> pd.DataFrame:
- workdate = (cob + bus_day).date()
- p = DAILY_DIR / str(workdate) / "Reports" / f"Asset Detail_{workdate:%d %b %Y}.csv"
- df = pd.read_csv(p, thousands=",")
- df = df[df["Asset Type"] == "FIXED INCOME SECURITIES"]
- df = df.set_index("CUSIP")
- df = df[["Shares/Par", "Base Price", "Local Market Value"]]
- for col in df.select_dtypes(include=["object"]).columns:
- df[col] = df[col].apply(lambda s: s[1:-1] if s.startswith("(") else s)
- df[col] = pd.to_numeric(df[col].str.replace(",", ""))
- check = df_blotter.join(df)
- return check
-
-
-def load_cash_report(workdate: datetime.date, cob):
- p = (
- DAILY_DIR
- / str(workdate)
- / "Reports"
- / f"Settled Cash Statement_{workdate:%d %b %Y}.csv"
- )
- df = pd.read_csv(p, thousands=",")
- df = df[
- df["Transaction Type"].isna() | df["Transaction Type"].isin(["BUY", "SELL"])
- ]
- df["Opening Balance Local"] = (
- df["Opening Balance Local"]
- .replace("[(]", "-", regex=True)
- .replace("[),]", "", regex=True)
- .astype("float")
- )
- df = df.groupby(["Account Name", "Account Number", "Local Currency Code"]).sum()
- df["date"] = cob
- df["fund"] = "BOWDST"
- df = df[["Opening Balance Local", "date", "fund"]]
- df.reset_index(inplace=True)
- df["Account Number"] = df["Account Number"].astype(
- "int64"
- ) # Account Numbers are read in as float
- df = df.rename(
- {
- "Account Name": "account_name",
- "Account Number": "account_number",
- "Local Currency Code": "currency_code",
- "Opening Balance Local": "balance",
- },
- axis=1,
- )
- df.to_sql("cash_balances", dawn_engine, if_exists="append", index=False)
-
-
-def get_positions(
- cob: datetime.date,
-) -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
- dawndb = dbconn("dawndb")
- df_blotter = pd.read_sql_query(
- "SELECT * FROM risk_positions(%s, NULL, %s)",
- dawndb,
- params=(cob, "BOWDST"),
- index_col=["identifier"],
- )
-
- cds_positions = pd.read_sql_query(
- "SELECT * FROM list_cds_marks_pre(%s, NULL, %s)",
- dawndb,
- params=(cob, "BOWDST"),
- index_col=["security_id"],
- )
- tranche_positions = pd.read_sql_query(
- "SELECT id, security_id, security_desc, maturity, a.notional, "
- "protection, orig_attach, orig_detach, tranche_factor, clean_nav, "
- "accrued, cp_code, cpty_id from list_cds(%s, %s) a "
- "LEFT JOIN tranche_risk ON id=tranche_id AND date=%s "
- "WHERE orig_attach IS NOT NULL",
- dawndb,
- params=(cob, "BOWDST", cob),
- index_col=["id"],
- )
-
- return df_blotter, cds_positions, tranche_positions
-
-
-def send_email(
- em,
- workdate: datetime.date,
- df_bonds: pd.DataFrame,
- df_cds: pd.DataFrame,
- df_tranches: pd.DataFrame,
-):
- attachments = []
- for name, df in zip(("bonds", "cds", "tranches"), (df_bonds, df_cds, df_tranches)):
- buf = StringIO()
- df.to_csv(buf)
- attachments.append(
- FileAttachment(
- name=f"{workdate} {name}.csv", content=buf.getvalue().encode()
- )
- )
- buf.close()
- em.send_email(
- f"{workdate} EOD positions",
- "",
- to_recipients=("hm-operations@bnymellon.com",),
- cc_recipients=("bowdoin-ops@lmcg.com",),
- attach=attachments,
- )
-
-
-if __name__ == "__main__":
- import argparse
- from serenitas.utils.exchange import ExchangeMessage
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "workdate",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=datetime.date.today(),
- help="working date",
- )
- args = parser.parse_args()
- em = ExchangeMessage()
- download_messages(em)
- cob = (args.workdate - bus_day).date()
- df_bonds, df_cds, df_tranches = get_positions(cob)
- send_email(em, cob, df_bonds, df_cds, df_tranches)
- load_cash_report(args.workdate, cob)
diff --git a/python/cash_reports.py b/python/cash_reports.py
deleted file mode 100644
index f0937d0d..00000000
--- a/python/cash_reports.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from report_ops.cash import NTCashReport, UMBCashReport, BNYCashReport
-import datetime
-from serenitas.analytics.dates import prev_business_day
-import logging
-
-logger = logging.getLogger(__name__)
-if __name__ == "__main__":
- import argparse
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "date",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=datetime.date.today(),
- help="work date",
- )
- args = parser.parse_args()
- for report_cls in (
- NTCashReport,
- UMBCashReport,
- BNYCashReport,
- ):
- report = report_cls(args.date)
- try:
- report.to_db()
- except ValueError as e:
- logger.warning(e)
diff --git a/python/collateral/baml_fcm.py b/python/collateral/baml_fcm.py
index 7508aad1..1d986941 100644
--- a/python/collateral/baml_fcm.py
+++ b/python/collateral/baml_fcm.py
@@ -145,7 +145,7 @@ def collateral(d, positions, *, engine, fund="Serenitas", positions_irs, **kwarg
}
)
df_margin = df_margin.set_index("AT", append=True)
- df_margin = df_margin.groupby(level=(0, 1)).sum()
+ df_margin = df_margin.groupby(level=(0, 1)).sum(numeric_only=True)
df_margin = df_margin.reset_index()
df_margin_fut.AT = "USD"
df_margin_fut = df_margin_fut.reset_index()
diff --git a/python/csv_headers/bond_upload.py b/python/csv_headers/bond_upload.py
deleted file mode 100644
index db1eb066..00000000
--- a/python/csv_headers/bond_upload.py
+++ /dev/null
@@ -1,164 +0,0 @@
-BBH_BONDS = [
- "Function of Instruction",
- "Client Reference Number",
- "Previous Reference Number",
- "Account Number",
- "Transaction Type",
- "Place of Settlement/Country",
- "Place of Safekeeping",
- "Trade Date",
- "Settlement Date",
- "Security ID",
- "Security Description",
- "Unit / Original Face Amount",
- "Currency",
- "Unit Price Amount",
- "Net Amount",
- "Trading Broker Type/ID",
- "Trading Broker Description",
- "Beneficiary of Securities Account",
- "Clearing Broker ID / Type",
- "Clearing Broker Description",
- "Clearing Agent Account",
- "Stamp Duty Code",
- "Stamp Duty Amount",
- "Special Settlement Type",
- "Special Indicator #1",
- "Special Indicator #2",
- "Registration Details",
- "Special Instruction",
- "Originator of Message",
- "Current Face/Amortize Value",
- "Principal Amount",
- "Interest Amount",
- "Other Fees Amount",
- "Commission Amount",
- "SEC Fees Amount",
- "Transaction Tax Amount",
- "Withholding Tax Amount",
- "Exchange Rate",
- "Resulting Currency",
- "Resulting Amount",
- "FX Currency",
- "Pool Reference Number",
- "Total Group Number",
- "Trade Number",
- "Repo Term Date (REPO only)",
- "Repo Amount (REPO only)",
- "Repo Reference Number (REPO only)",
- "Repo Rate (REPO Only)",
- "Ticker (CPF and CRF Only)",
- "Strike Price (CPF and CRF Only)",
- "Expiration Date (CPF and CRF Only)",
- "Broker Number (CPF and CRF Only)",
- "Broker Account (CPF and CRF Only)",
- "Contract Size (Option Contract and Future Contract Only)",
- "Place of Trade Narrative",
- "Common Reference",
- "Partial Settlement Allowed",
- "Partial Settlement Tolerance",
- "No Automatic Market Claim",
- "Corporate Action Coupon Option",
- "Triparty Collateral Segregation",
- "FX Cancel - For CANC instructions only",
- "Fund Accounting Only Trade (RPTO)",
- "Custody Only Trade (NACT)",
- "Research Fee (RSCH)",
-]
-
-bbh_swap = [
- "Deal Type",
- "Deal Id",
- "Action",
- "Client",
- "Fund",
- "Portfolio",
- "Folder",
- "Custodian",
- "Cash Account",
- "Counterparty",
- "Comments",
- "State",
- "Trade Date",
- "Reserved",
- "Reserved",
- "Reserved",
- "Notional",
- "PremiumSettlementDate",
- "ExpirationDate",
- "PremiumCurrency",
- "PercentageOfPremium",
- "ExerciseType",
- "Reserved",
- "SettlementMode",
- "SettlementRate",
- "Transaction Indicator",
- "InitialMargin",
- "InitialMarginPercentage",
- "InitialMarginCurrency",
- "ReceiveLegRateType",
- "ReceiveFloatRate",
- "ReceiveFirstCouponDate",
- "ReceiveFirstCouponRate",
- "ReceiveFixedRate",
- "ReceiveDaycount",
- "ReceiveFrequency",
- "ReceivePaymentRollConvention",
- "ReceiveEffectiveDate",
- "ReceiveMaturityDate",
- "ReceiveNotional",
- "ReceiveArrears",
- "ReceiveAdjusted",
- "ReceiveCompound",
- "ReceiveCurrency",
- "PayLegRateType",
- "PayFloatRate",
- "PayFirstCouponDate",
- "PayFirstCouponRate",
- "PayFixedRate",
- "PayDaycount",
- "PayFrequency",
- "PayPaymentRollConvention",
- "PayEffectiveDate",
- "PayMaturityDate",
- "PayNotional",
- "PayArrears",
- "PayAdjusted",
- "PayCompound",
- "PayCurrency",
- "RegenerateCashFlow",
- "GiveUpBroker",
- "ClientReference",
- "ReceiveDiscountCurve",
- "ReceiveForwardCurve",
- "PayDiscountCurve",
- "PayForwardCurve",
- "ReceiveFixingFrequency",
- "ReceiveInterestCalcMethod",
- "ReceiveCompoundAverageFrequency",
- "PayFixingFrequency",
- "PayInterestCalcMethod",
- "PayCompoundAverageFrequency",
- "SwapType",
- "AttachmentPoint",
- "ExhaustionPoint",
- "UnderlyingInstrument",
- "AssociatedDealType",
- "AssociatedDealId",
- "CounterpartyReference",
- "PremiumSettlementCurrency",
- "PremiumSettlementAmount",
- "ReceiveIMM Period",
- "PayIMMPeriod",
- "Reserved",
- "ClearingFacility",
- "Strike",
- "CcpTradeRef",
- "BreakClauseFrequency",
- "BlockId",
- "BlockAmount",
- "Cross Currency Premium Payment",
- "Premium Payment Amount",
- "Netting Id",
- "BreakClauseDate",
-]
diff --git a/python/csv_headers/citco.py b/python/csv_headers/citco.py
deleted file mode 100644
index 689472a1..00000000
--- a/python/csv_headers/citco.py
+++ /dev/null
@@ -1,338 +0,0 @@
-GTL = [
- "OrdStatus",
- "ExecTransType",
- "ClientOrderID",
- "FillID",
- "IDofOrderOrFillforAction",
- "LotNumber",
- "Symbol",
- "SecurityType",
- "SecurityCurrency",
- "SecurityDescription",
- "BuySellShortCover",
- "OpenClose",
- "IDSource",
- "SecurityID",
- "ISIN",
- "CUSIP",
- "SEDOL",
- "Bloomberg",
- "CINS",
- "WhenIssued",
- "IssueDate",
- "MaturityDate",
- "Coupon%",
- "ExecutionInterestDays",
- "AccruedInterest",
- "FaceValue",
- "RollableType",
- "RepoCurrency",
- "DayCountFraction/RepoCalendar",
- "RepoLoanAmount",
- "Trader",
- "OrderQty",
- "FillQty",
- "CumQty",
- "HairCut",
- "AvgPrice",
- "FillPrice",
- "TradeDate",
- "TradeTime",
- "OrigDate",
- "Unused",
- "SettlementDate",
- "ExecutingUser",
- "Comment",
- "Account",
- "Fund",
- "SubFund",
- "AllocationCode",
- "StrategyCode",
- "ExecutionBroker",
- "ClearingAgent",
- "ContractSize",
- "Commission",
- "FXRate",
- "FWDFXpoints",
- "Fee",
- "CurrencyTraded",
- "SettleCurrency",
- "FX/BASErate",
- "BASE/FXrate",
- "StrikePrice",
- "PutOrCall",
- "DerivativeExpiry",
- "SubStrategy",
- "OrderGroup",
- "RepoPenalty",
- "CommissionTurn",
- "AllocRule",
- "PaymentFreq",
- "RateSource",
- "Spread",
- "CurrentFace",
- "CurrentPrincipalFactor",
- "AccrualFactor",
- "TaxRate",
- "Expenses",
- "Fees",
- "PostCommAndFeesOnInit",
- "ImpliedCommissionFlag",
- "TransactionType",
- "MasterConfrimType",
- "MatrixTerm",
- "EMInternalSeqNo.",
- "ObjectivePrice",
- "MarketPrice",
- "StopPrice",
- "NetConsdieration",
- "FixingDate",
- "DeliveryInstructions",
- "ForceMatchID",
- "ForceMatchType",
- "ForceMatchNotes",
- "CommissionRateforAllocation",
- "CommissionAmountforFill",
- "ExpenseAmountforFill",
- "FeeAmountforFill",
- "StandardStrategy",
- "StrategyLinkName",
- "StrategyGroup",
- "FillFXSettleAmount",
- "Reserved",
- "Reserved",
- "DealAttributes",
- "FinanceLeg",
- "PerformanceLeg",
- "Attributes",
- "DealSymbol",
- "Initialmargintype",
- "InitialMarginAmount",
- "InitialmarginCCY",
- "ConfirmStatus",
- "Counterparty",
- "TraderNotes",
- "ConvertPricetoSettleCcy",
- "BondCouponType",
- "GenericFeesEnabled",
- "GenericFeesListing",
- "OrderLevelAttributes",
- "Settling/Sub",
- "ConfirmationTime",
- "ConfirmationMeans",
- "PaymentDate",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
- "",
-]
-GIL = [
- "Command",
- "Group_Id",
- "UniqueIdentifier",
- "InstrumentType",
- "UnderlyingIDSource",
- "UnderlyingSecurityId",
- "UnderlyingISIN",
- "UnderlyingCUSIP",
- "UnderlyingSEDOL",
- "UnderlyingBloombergCode",
- "UnderlyingCINS",
- "UnderlyingRIC",
- "UnderlyingCDS",
- "UnderlyingCDSDN",
- "UnderlyingUserID",
- "UnderlyingTID",
- "Symbol",
- "(BLANK)",
- "Birth_date",
- "Death_date",
- "Active",
- "(Blank)",
- "(Blank)",
- "(Blank)",
- "Sec_Desc",
- "(Blank)",
- "LocalCcy",
- "Country",
- "SettleCal",
- "(Blank)",
- "TickSize",
- "MarketID",
- "PriceBase",
- "PriceFactor",
- "FixRate",
- "ResetFreq",
- "(Blank)",
- "(Blank)",
- "1stCpnDate",
- "LastCpnDate",
- "CouponRate",
- "CashFlowFreq_Id",
- "SettleDays",
- "DayCount_ID",
- "AccruMethodID",
- "AccruStartDate",
- "IssueAmount",
- "CreditEvent",
- "CounterParty",
- "CtpyAbbrev",
- "Tier",
- "CtpyCountry",
- "CtpyCountry",
- "Ctpymoody",
- "BondClass",
- "BondType",
- "SerisCode",
- "(Blank)",
- "RateSetDate",
- "GeneralDirection",
- "PrincipalExchTypeID",
- "S_P_PaymentFreqID",
- "S_P_CurrencyCode",
- "S_P_RateIndexID",
- "S_P_AccrualMethodID",
- "S_P_InterestRate",
- "S_P_PaymentCalandarID",
- "S_P_DayConventionID",
- "S_P_ResetFreqID",
- "S_P_NotionalAmt",
- "S_P_ResetCalandarID",
- "S_P_RateSourceID",
- "S_P_InitialResetRate",
- "(Blank)",
- "(Blank)",
- "(Blank)",
- "(Blank)",
- "S_R_PaymentFreqID",
- "S_R_CurrencyCode",
- "S_R_RateIndexID",
- "S_R_AccrualMethodID",
- "S_R_InterestRate",
- "S_R_PaymentCalandarID",
- "S_R_DayConventionID",
- "S_R_ResetFreqID",
- "S_R_NotionalAmount",
- "S_R_ResetCalandarID",
- "S_R_RateSource",
- "S_R_InitialResetRate",
- "(Blank)",
- "(Blank)",
- "(Blank)",
- "(Blank)",
- "OtherCode1",
- "OtherCode1-Value",
- "OtherCode2",
- "OtherCode2-Value",
- "Attribute1",
- "Attribute1-Value",
- "Attribute1-Type",
- "Attribute2",
- "Attribute2-Value",
- "Attribute2-Type",
- "Attribute3",
- "Attribute3-Value",
- "Attribute3-Type",
- "Attribute4",
- "Attribute4-Value",
- "Attribute4-Type",
- "Attribute5",
- "Attribute5-Value",
- "Attribute5-Type",
- "(Blank)",
- "OptionType",
- "StrikeMonth",
- "StrikePrice",
- "ExpirationDate",
- "Put/CallFlag",
- "ContractSize",
- "CashRebate",
- "Barrier1",
- "Barrier2",
- "Notes",
- "(Blank)",
- "DeliveryPeriodType",
- "DeliveryPeriod",
- "DeliveryAbbrev",
- "DaysDelay",
- "CurrentPrincipalFactor",
- "AccrualFactor",
- "(Blank)",
- "Odd_First_Coupon",
- "Odd_Last_Coupon",
- "Accrual_Startdate",
- "Accrual_Enddate",
- "Balloon_Payment",
- "Compound_Method",
- "Scale_Factor",
- "CDS_Subtype_ID",
- "Recovery_Rate",
- "Attachment_Points",
- "Detachment_Points",
- "(Blank)",
- "Spread_Bps",
- "Rate_Change_Fre",
- "Spread_Start_Date",
- "Rate_Source_Id",
- "OTC_FloatingRate_Flag",
- "VAR_Start_Date",
- "FutureName",
- "LastTradeDate",
- "LCode",
- "CurrentStartDate",
- "SpotLimitDate",
- "FirstNoticeDate",
- "LastNoticeDate",
- "CTDTID",
- "CTDConv.Factor",
- "RollDate",
- "ValueDate1",
- "EndDate1",
- "ValueDate2",
- "EndDate2",
- "ValueDate3",
- "EndDate3",
- "ValueDate4",
- "EndDate4",
- "ValueDate5",
- "EndDate5",
- "ForeignFlag",
- "RestrictedFlag",
- "ParValue",
- "SharesOutstanding",
- "Industry_SIC_ID",
- "GICSLevel3ID",
- "InflationIndexFlag",
- "LinearAccrualCalcFlag",
- "ExpirationTime",
- "ExpirationTimeZoneId",
- "SwapStartDate",
- "ExpValueDateTimeComponent",
- "BasketTypeID",
- "BasketLinkAmount2",
- "BasketLinkPercent2",
- "BasketLinkTID3",
- "BasketLinkAmount3",
- "BasketLinkPercent3",
- "BasketLinkFromDate",
- "BasketLinkToDate",
- "BasketLinkComments",
- "BarrierOptionWindow1",
- "BarrierOptionWindow2",
-]
diff --git a/python/csv_headers/globeop_upload.py b/python/csv_headers/globeop_upload.py
deleted file mode 100644
index cd2cf00c..00000000
--- a/python/csv_headers/globeop_upload.py
+++ /dev/null
@@ -1,780 +0,0 @@
-HEADERS_PRE = [
- "Deal Type",
- "Deal Id",
- "Action",
- "Client",
- "Fund",
- "Portfolio",
- "Folder",
- "Custodian",
- "Cash Account",
- "Counterparty",
- "Comments",
- "State",
- "Trade Date",
-]
-
-HEADERS = {
- "bond": HEADERS_PRE
- + [
- "Settlement Date",
- "BrokerShortName",
- "GlopeOp Security Identifier",
- "CUSIP",
- "ISIN",
- "Sedol",
- "Reserved",
- "Reserved",
- "Security Description",
- "Transaction Indicator",
- "SubTransaction Indicator",
- "Quantity",
- "Price",
- "Commission",
- "Tax",
- "BlockId",
- "BlockAmount",
- "Reserved",
- "Reserved",
- "Accrued",
- "ClearingMode",
- "FaceAmount",
- "Reserved",
- "SettlementCurrency",
- "Reserved",
- "CrossCurrencyRate",
- "ClientReference",
- "Reserved",
- "SettlementAmount",
- "Yield",
- "TradeDateTimeStamp",
- "CpiRefRatio",
- "SettlementCurrencyHedge",
- "TradeDateFx",
- ],
- "cds": HEADERS_PRE
- + [
- "Reserved",
- "Reserved",
- "EffectiveDate",
- "MaturityDate",
- "Currency",
- "Notional",
- "FixedRate",
- "PaymentRollDateConvention",
- "DayCount",
- "PaymentFrequency",
- "FirstCouponRate",
- "FirstCouponDate",
- "ResetLag",
- "Liquidation",
- "LiquidationDate",
- "Protection",
- "UnderlyingSecurityId",
- "UnderlyingSecurityDescription",
- "CreditSpreadCurve",
- "CreditEvents",
- "RecoveryRate",
- "Settlement",
- "InitialMargin",
- "InitialMarginPercentage",
- "InitialMarginCurrency",
- "DiscountCurve",
- "ClientReference",
- "UpfrontFee",
- "UpfrontFeePayDate",
- "RegenerateCashFlow",
- "UpfrontFeeComment",
- "Executing Broker",
- "SwapType",
- "OnPrice",
- "OffPrice",
- "AttachmentPoint",
- "ExhaustionPoint",
- "Fees",
- "Fee Payment Dates",
- "Fee Comments",
- "Credit Event Occurred",
- "Calendar",
- "Clearing Facility",
- "Adjusted",
- "CcpTradeRef",
- "BlockId",
- "BlockAmount",
- "NettingId",
- "AnnouncementDate",
- "ExecTS",
- "DefaultProbability",
- "ClientMargin",
- "Factor",
- "ISDADefinition",
- ],
- "swaption": HEADERS_PRE
- + [
- "Reserved",
- "Reserved",
- "Reserved",
- "Notional",
- "PremiumSettlementDate",
- "ExpirationDate",
- "PremiumCurrency",
- "PercentageOfPremium",
- "ExerciseType",
- "Reserved",
- "SettlementMode",
- "SettlementRate",
- "Transaction Indicator",
- "InitialMargin",
- "InitialMarginPercentage",
- "InitialMarginCurrency",
- "ReceiveLegRateType",
- "ReceiveFloatRate",
- "ReceiveFirstCouponDate",
- "ReceiveFirstCouponRate",
- "ReceiveFixedRate",
- "ReceiveDaycount",
- "ReceiveFrequency",
- "ReceivePaymentRollConvention",
- "ReceiveEffectiveDate",
- "ReceiveMaturityDate",
- "ReceiveNotional",
- "ReceiveArrears",
- "ReceiveAdjusted",
- "ReceiveCompound",
- "ReceiveCurrency",
- "PayLegRateType",
- "PayFloatRate",
- "PayFirstCouponDate",
- "PayFirstCouponRate",
- "PayFixedRate",
- "PayDaycount",
- "PayFrequency",
- "PayPaymentRollConvention",
- "PayEffectiveDate",
- "PayMaturityDate",
- "PayNotional",
- "PayArrears",
- "PayAdjusted",
- "PayCompound",
- "PayCurrency",
- "RegenerateCashFlow",
- "GiveUpBroker",
- "ClientReference",
- "ReceiveDiscountCurve",
- "ReceiveForwardCurve",
- "PayDiscountCurve",
- "PayForwardCurve",
- "ReceiveFixingFrequency",
- "ReceiveInterestCalcMethod",
- "ReceiveCompoundAverageFrequency",
- "PayFixingFrequency",
- "PayInterestCalcMethod",
- "PayCompoundAverageFrequency",
- "SwapType",
- "AttachmentPoint",
- "ExhaustionPoint",
- "UnderlyingInstrument",
- "AssociatedDealType",
- "AssociatedDealId",
- "CounterpartyReference",
- "PremiumSettlementCurrency",
- "PremiumSettlementAmount",
- "ReceiveIMM Period",
- "PayIMMPeriod",
- "Reserved",
- "ClearingFacility",
- "Strike",
- "CcpTradeRef",
- "BreakClauseFrequency",
- "BlockId",
- "BlockAmount",
- "Cross Currency Premium Payment",
- "Premium Payment Amount",
- "Netting Id",
- "BreakClauseDate",
- ],
- "future": HEADERS_PRE
- + [
- "Settlement Date",
- "Reserved",
- "GlopeOp Security Identifier",
- "Reserved",
- "Reserved",
- "Reserved",
- "Bloomberg Ticker",
- "RIC",
- "Security Description",
- "Transaction Indicator",
- "SubTransaction Indicator",
- "Quantity",
- "Price",
- "Commission",
- "Tax",
- "VAT",
- "Trade Currency",
- "Reserved",
- "Reserved",
- "Broker Short Name",
- "MaturityDate",
- "Exchange",
- "Client Reference",
- "Swap Type",
- "Initial Margin",
- "Initial Margin Currency",
- "Future Event",
- "Commission Entries",
- "BlockId",
- "Block Amount",
- ],
- "wire": HEADERS_PRE
- + [
- "Settlement Date",
- "Reserved",
- "Reserved",
- "Currency",
- "Amount",
- "Associated Deal Type",
- "Associated Deal Id",
- "Transaction Type",
- "Instrument Type",
- "Yield",
- "Client Reference",
- "ClearingFacility",
- "Deal Function",
- "Reset Price",
- "Reset Date",
- "Ccp Trade Ref",
- "Margin Type",
- "Block Id",
- "Block Amount",
- ],
- "spot": HEADERS_PRE
- + [
- "Settlement Date",
- "Dealt Currency",
- "Spot Rate",
- "Forward Rate",
- "Buy Currency",
- "Buy Amount",
- "Sell Currency",
- "Sell Amount",
- "ClearingFees",
- "BlockId",
- "BlockAmount",
- "Commission Currency",
- "Commission",
- "Reserved",
- "AssociatedDealType",
- "AssociatedDealId",
- "BrokerShortName",
- "ClientReference",
- ],
- "fx_swap": HEADERS_PRE
- + [
- "Reserved",
- "Dealt Currency",
- "Currency Pair",
- "Near Side Currency Rate",
- "Near Side Settlement Date",
- "Near Side Buy Currency",
- "Near Side Buy Amount",
- "Near Side Sell Currency",
- "Near Side Sell Amount",
- "Reserved",
- "Far Side Rate",
- "Far Side Settlement Date",
- "Far Side Point",
- "Far Side Buy Currency",
- "Far Side Buy Amount",
- "Far Side Sell Currency",
- "Far Side Sell Amount",
- "Client Reference",
- "BrokerShortName",
- "CcpTradeRef",
- "BlockId",
- "BlockAmount",
- ],
- "repo": HEADERS_PRE
- + [
- "Settlement Date",
- "Broker",
- "GlopeOp Security Identifier",
- "CUSIP",
- "ISIN",
- "Sedol",
- "Reserved",
- "Reserved",
- "Security Description",
- "TransactionIndicator",
- "CurrentFactor",
- "Quantity",
- "Price",
- "Reserved",
- "Reserved",
- "Reserved",
- "Currency",
- "ExchangeRate",
- "Comments",
- "Reserved",
- "ExpirationDate",
- "Reserved",
- "WeightedAmount",
- "InterestCalcMethod",
- "DirtyPrice",
- "Haircut",
- "RepoRate",
- "OpenRepo",
- "CallNotice",
- "FaceAmount",
- "AccruedInterest",
- "Yield",
- "CouponTo",
- "DayCount",
- "ClearingMode",
- "SecurityType",
- "BrokerShortName",
- "ClientReference",
- "DateTimeStamp",
- ],
- "capfloor": HEADERS_PRE
- + [
- "Reserved",
- "Reserved",
- "FloatingRateIndex",
- "FloatingRateIndexDescription",
- "TransactionIndicator",
- "Reserved",
- "CapOrFloor",
- "Notional",
- "Strike",
- "ValueDate",
- "ExpirationDate",
- "PremiumPercent",
- "PremiumDate",
- "PricingType",
- "PaymentFrequency",
- "FixingFrequency",
- "DayCountConvention",
- "PaymentBDC",
- "Reserved",
- "PaymentAtBeginningOrEnd",
- "Commission",
- "FirstCouponDate",
- "InitialMargin",
- "InitialMarginPercent",
- "InitialMarginCurrency",
- "Reserved",
- "Reserved",
- "Reserved",
- "ResetLag",
- "Adjusted",
- "CashType",
- "BinaryFixedAmount",
- "BarrierPaymentAt",
- "KnockPeriod",
- "UpperBarrier",
- "LowerBarrier",
- "RebateUp",
- "RebateDown",
- "RebateSettlementLag",
- "ClientReference",
- "BrokerShortName",
- "CptyReference",
- "SwapType",
- "ClearingFacility",
- "CcpTradeRef",
- "BlockId",
- "BlockAmount",
- "Netting Id",
- "TradeDateTimeStamp",
- "AccrualBDC",
- "MaturityBDC",
- "RollConvention",
- "Calendar",
- "Arrears",
- "PaymentLag",
- "Reserved1",
- "InflationLag",
- "InflationReference",
- "SettlementCurrency",
- "Collateralized",
- "TradeDateFX",
- ],
- "trs": HEADERS_PRE
- + [
- "Reserved",
- "Reserved",
- "ReceiveLegRateType",
- "ReceiveUnderlyingType",
- "ReceiveUnderlyingSecurity",
- "ReceiveUnderlyingDescription",
- "ReceiveFloatRate",
- "ReceiveFirstCouponDate",
- "ReceiveFirstCouponRate",
- "ReceiveFixedRate",
- "ReceiveDaycount",
- "ReceiveFrequency",
- "ReceivePaymentBDC",
- "ReceiveEffectiveDate",
- "ReceiveMaturityDate",
- "ReceiveNotional",
- "ReceivePrice",
- "ReceiveArrears",
- "Reserved",
- "Reserved",
- "ReceiveCurrency",
- "Reserved",
- "ReceiveSpread",
- "PayLegRateType",
- "PayUnderlyingType",
- "PayUnderlyingSecurity",
- "PayUnderlyingDescription",
- "PayFloatRate",
- "PayFirstCouponDate",
- "PayFirstCouponRate",
- "PayFixedRate",
- "PayDaycount",
- "PayFrequency",
- "PayPaymentBDC",
- "PayEffectiveDate",
- "PayMaturityDate",
- "PayNotional",
- "PayPrice",
- "PayArrears",
- "Reserved",
- "Reserved",
- "PayCurrency",
- "Reserved",
- "PaySpread",
- "Reserved",
- "InitialMargin",
- "InitialMarginPercent",
- "InitialMarginCurrency",
- "ClientReference",
- "CcpTradeRef",
- "BlockId",
- "BlockAmount",
- "Netting Id",
- "ExchangeRate",
- "ReceiveQuantity",
- "PayQuantity",
- "ReceiveAccrued",
- "PayAccrued",
- "ReceiveNotionalExchange",
- "PayNotionalExchange",
- "ReceiveResetLag",
- "PayResetLag",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "ReceiveCalendar",
- "PayCalendar",
- "ReceiveInterestCalcMethod",
- "PayInterestCalcMethod",
- "ReceiveCompoundAverageFrequency",
- "PayCompoundAverageFrequency",
- "ReceiveFixingFrequency",
- "PayFixingFrequency",
- "ReceiveStubLocation",
- "ReceiveBeginFloatRate1",
- "ReceiveBeginFloatRate2",
- "ReceiveEndFloatRate1",
- "ReceiveEndFloatRate2",
- "PayStubLocation",
- "PayBeginFloatRate1",
- "PayBeginFloatRate2",
- "PayEndFloatRate1",
- "PayEndFloatRate2",
- "Fees",
- "Fee Payment Dates",
- "Fee Comments",
- "ExecutionDateTimeStamp",
- "FeeTypes",
- "FeeCurrencies",
- "ReceivePaymentAt",
- "PayPaymentAt",
- "SwapType",
- "Reserved1",
- "ReceiveAccrualBDC",
- "PayAccrualBDC",
- "ReceiveMaturityBDC",
- "PayMaturityBDC",
- "ReceiveRollConvention",
- "PayRollConvention",
- "ReceivePaymentLag",
- "PayPaymentLag",
- "ReceiveSettlementCurrency",
- "PaySettlementCurrency",
- "Collateralized",
- "TradeDateFX",
- ],
- "irs": [
- "Reserved3",
- "Reserved4",
- "RecLegType",
- "RecIndex",
- "RecFirstCpnDate",
- "RecFirstCpnRate",
- "RecFixedRate",
- "RecDayCount",
- "RecPaymentFreq",
- "ReceivePaymentBDC",
- "RecEffectiveDate",
- "RecMaturityDate",
- "RecNotional",
- "RecArrears",
- "Reserved5",
- "RecCompound",
- "RecCurrency",
- "Reserved6",
- "PayLegType",
- "PayIndex",
- "PayFirstCpnDate",
- "PayFirstCpnRate",
- "PayFixedRate",
- "PayDayCount",
- "PayPaymentFreq",
- "PayPaymentBDC",
- "PayEffectiveDate",
- "PayMaturityDate",
- "PayNotional",
- "PayArrears",
- "Reserved7",
- "PayCompound",
- "PayCurrency",
- "Reserved8",
- "InitialMargin",
- "InitialMarginPercent",
- "InitialMarginCcy",
- "CalendarPay",
- "CalendarReceive",
- "Reserved9",
- "RecFloatingRateSpread",
- "RecFixingFreq",
- "RecInterestCalcMethod",
- "Reserved10",
- "PayFloatingRateSpread",
- "PayFixingFreq",
- "PayInterestCalcMethod",
- "Reserved11",
- "GiveUpBroker",
- "RecBrokenPeriod",
- "RecBeginFloatRate1",
- "RecBeginFloatRate2",
- "RecEndFloatRate1",
- "RecEndFloatRate2",
- "PayBrokenPeriod",
- "PayBeginFloatRate1",
- "PayBeginFloatRate2",
- "PayEndFloatRate1",
- "PayEndFloatRate2",
- "Reserved12",
- "Reserved13",
- "SwapType",
- "InflationMarketConv",
- "ClientRef",
- "Reserved14",
- "Reserved15",
- "Reserved16",
- "Reserved17",
- "Reserved18",
- "Reserved19",
- "RecResetLag",
- "PayResetLag",
- "RecExchangeAmount",
- "PayExchangeAmount",
- "AssociatedDealType",
- "AssociatedDealId",
- "ClearingFacility",
- "CcpTradeRef",
- "BreakClauseFreq",
- "BlockId",
- "BlockAmount",
- "UpfrontFee",
- "UpfrontFeePayDate",
- "UpfrontFeeComment",
- "UpfrontFeeCurrency",
- "NettingId",
- "BreakClauseDate",
- "Reserved20",
- "IndexLevel",
- "TradeDateTime",
- "ReceivePaymentLag",
- "PayPaymentLag",
- "ReceiveRateMultiplier",
- "PayRateMultiplier",
- "ReceiveRateCap",
- "PayRateCap",
- "ReceiveRateFloor",
- "PayRateFloor",
- "ReceiveRollConvention",
- "PayRollConvention",
- "ReceiveAccrualBDC",
- "PayAccrualBDC",
- "ReceiveMaturityBDC",
- "PayMaturityBDC",
- "ReceivePaymentAt",
- "PayPaymentAt",
- "ReceiveClientMargin",
- "PayClientMargin",
- "Resvered21",
- "ReceiveRateCutOff",
- "PayRateCutOff",
- "ReceiveInflationLag",
- "PayInflationLag",
- "ReceiveSettlementCurrency",
- "PaySettlementCurrency",
- "CounterpartyReference",
- "ReceiveInflationReference",
- "PayInflationReference",
- "Collateralized",
- "InitialFXRate",
- "TradeDateFX",
- "ReceiveFixingSource",
- "PayFixingSource",
- "ReceiveFxFixingLag",
- "PayFxFixingLag",
- "ReceiveFxFixingCalendar",
- "PayFxFixingCalendar",
- "SEFFlag",
- "ReceiveObservationShift",
- "PayObservationShift",
- "ReceiveCashFlowStubType",
- "PayCashFlowStubType",
- ],
- "iam": HEADERS_PRE
- + [
- "SettlementDate",
- "Reserved",
- "InstrumentType",
- "ExpirationDate",
- "CallNoticeIndicator",
- "TransactionIndicator",
- "StartMoney",
- "Currency",
- "Rate",
- "Commission",
- "DealFunction",
- "FromAccount",
- "ClientReference",
- "Basis",
- "MarginType",
- "ClearingFacility" "CcpTradeRef",
- "BlockId",
- "BlockAmount",
- "ExecutionDateTimeStamp",
- "Collateralized",
- "TradeDateFX",
- ],
- "termination": [
- "DealType",
- "DealId",
- "Action",
- "Client",
- "SubAction",
- "PartialTermination",
- "TerminationAmount",
- "TerminationDate",
- "FeesPaid",
- "FeesReceived",
- "DealFunction",
- "Reserved",
- "ClientReference",
- "TradeDate",
- "EffectiveDate",
- "FirstCouponDate",
- "FeePaymentDate",
- "SpecialInstructions",
- "AssignedCounterparty",
- "AssignmentFee",
- "AssignedFeeTradeDate",
- "AssignedFeeValueDate",
- "AssignedCustodian",
- "AssignedCashAccount",
- "Reserved",
- "FeeCurrency",
- "GoTradeId",
- "FeeComments",
- "ZeroOutInterestCashFlows",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "InitialMargin",
- "InitialMarginCurrency",
- ],
-}
-
-
-POSITION_HEADERS = {
- "bond": [
- "AccountNumber",
- "COB Date",
- "Prime Broker",
- "SecurityType",
- "CUSIP",
- "ISIN",
- "SEDOL",
- "SecurityDescription",
- "Position",
- "MarketPrice",
- "Currency",
- "Base Market Value",
- "Local Market Value",
- "Fx Rate",
- "CurrentFace",
- ],
- "future": [
- "AccountNumber",
- "COB Date",
- "Prime Broker",
- "SecurityType",
- "BBGTicker",
- "RIC",
- "UnderlyingSecurity",
- "SecurityDescription",
- "Currency",
- "Quantity",
- "OpenTradeEquity",
- "ClosingPrice",
- "MaturityDate",
- "Unrealised P&L in USD",
- "Local Market Value",
- "Fx Rate",
- ],
- "otc": [
- "Client Name",
- "Fund Name",
- "Counterparty",
- "Product Type",
- "Unique Deal ID",
- "TransactionIndicator (Buy/Sell)",
- "PutCall Indicator (Call/Put)",
- "CapFloorIndicator",
- "CurrencyPair",
- "DealCurrencyA",
- "DealCurrencyB",
- "NotionalA",
- "NotionalB",
- "OriginalPrice",
- "Strike",
- "FixedRate",
- "Quantity",
- "Start Date",
- "Effective Date",
- "Maturity Date",
- "Underlying Maturity",
- "RecPayFixed",
- "Underlying (ISIN / CUSP / RED CODES)",
- "Underlying Desc",
- "Exercise Type",
- "MTM Currency",
- "MTM Valuation",
- "COB Date",
- "Clearing House Name",
- ],
-}
diff --git a/python/csv_headers/irs.py b/python/csv_headers/irs.py
deleted file mode 100644
index 144d898c..00000000
--- a/python/csv_headers/irs.py
+++ /dev/null
@@ -1,136 +0,0 @@
-irs_new = [
- "DealType",
- "DealId",
- "Action",
- "Client",
- "Fund",
- "Portfolio/Business Unit",
- "Strategy",
- "Custodian",
- "CashAccount",
- "Counterparty",
- "Comments",
- "State",
- "TradeDate",
- "Reserved",
- "Reserved",
- "ReceiveLegRateType",
- "ReceiveFloatRate",
- "ReceiveFirstCouponDate",
- "ReceiveFirstCouponRate",
- "ReceiveFixedRate",
- "ReceiveDaycount",
- "ReceiveFrequency",
- "RecievePaymentBDC",
- "ReceiveEffectiveDate",
- "ReceiveMaturityDate",
- "ReceiveNotional",
- "ReceiveResetArrears",
- "Reserved",
- "Reserved",
- "ReceiveCurrency",
- "Reserved",
- "PayLegRateType",
- "PayFloatRate",
- "PayFirstCouponDate",
- "PayFirstCouponRate",
- "PayFixedRate",
- "PayDaycount",
- "PayFrequency",
- "PayPaymentBDC[Previously PaymentRollConv]",
- "PayEffectiveDate",
- "PayMaturityDate",
- "Pay Notional",
- "PayResetArrears",
- "Reserved",
- "Reserved",
- "PayCurrency",
- "Reserved",
- "InitialMargin",
- "InitialMarginPercentage",
- "InitialMarginCurrency",
- "CalendarPay",
- "CalendarReceive",
- "Reserved",
- "ReceiveSpread",
- "ReceiveFixingFrequency",
- "ReceiveInterestCalcMethod",
- "Reserved",
- "PaySpread",
- "PayFixingFrequency",
- "PayInterstCalcMethod",
- "Reserved",
- "GiveUpCounterparty",
- "ReceiveStubLocation",
- "ReceiveBeginFloatRate1",
- "ReceiveBeginFloatRate2",
- "ReceiveEndFloatRate1",
- "ReceiveEndFloatRate2",
- "PayStubLocation",
- "PayBeginFloatRate1",
- "PayBeginFloatRate2",
- "PayEndFloatRate1",
- "PayEndFloatRate2",
- "Reserved",
- "Reserved",
- "SwapType",
- "Reserved",
- "ClientReference",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "Reserved",
- "ReceiveResetLag",
- "PayResetLag",
- "ReceiveExchangeAmount",
- "PayExchangeAmount",
- "AssociatedDealType",
- "AssociatedDealId",
- "ClearingFacility",
- "CcpTradeRef",
- "BreakClauseFrequency",
- "BlockId",
- "BlockAmount",
- "UpfrontFee",
- "UpfrontFeePaydate",
- "UpFrontFeeComments",
- "UpfrontFeeCurrency ",
- "Netting Id",
- "BreakClauseDate",
- "CashFlowStubType",
- "IndexLevel",
- "ExecutionDateTimeStamp",
- "ReceivePaymentLag",
- "PayPaymentLag",
- "ReceiveRateMultiplier",
- "PayRateMultiplier",
- "ReceiveRateCap",
- "PayRateCap",
- "ReceiveRateFloor",
- "PayRateFloor",
- "ReceiveRollConvention",
- "PayRollConvention",
- "ReceiveAccrualBDC",
- "PayAccrualBDC",
- "ReceiveMaturityBDC",
- "PayMaturityBDC",
- "ReceivePaymentAt",
- "PayPaymentAt",
- "ReceiveClientMargin",
- "PayClientMargin",
- "Reserved1",
- "ReceiveRateCutOff",
- "PayRateCutOff",
- "InflationLag",
- "InflationReference",
- "ReceiveSettlementCurrency",
- "PaySettlementCurrency",
- "CounterpartyReference",
- "ReceiveInflationReference",
- "PayInflationReference",
- "Collateralized",
- "InitialFXRate",
- "TradeDateFX",
-]
diff --git a/python/csv_headers/mtm_upload.py b/python/csv_headers/mtm_upload.py
deleted file mode 100644
index d5619616..00000000
--- a/python/csv_headers/mtm_upload.py
+++ /dev/null
@@ -1,201 +0,0 @@
-mtm_term = [
- "Swap ID",
- "Allocation ID",
- "Description",
- "Broker Id",
- "DTCC CounterParty ID",
- "Trade ID",
- "Trade Date",
- "Effective Date",
- "Settle Date",
- "Maturity Date",
- "Account Abbreviation",
- "1st Leg Notional",
- "Currency Code",
- "1st Leg Rate",
- "Initial Payment",
- "Initial Payment Currency",
- "Payment Frequency Description",
- "Original Issue Date",
- "Interest Payment Method Description",
- "First Payment Date",
- "Product Type",
- "Product Sub Type",
- "Transaction Type",
- "Protection",
- "Transaction Code",
- "Remaining Party",
- "DTCC Remaining CounterParty ID",
-]
-mtm_swaption = [
- "Swap ID",
- "Broker Id",
- "Trade ID",
- "Trade Date",
- "Settle Date",
- "Supplement Date",
- "Supplement 2 Date",
- "Maturity Date",
- "Account Abbreviation",
- "1st Leg Notional",
- "Currency Code",
- "1st Leg Rate",
- "Initial Payment Currency",
- "Initial Payment",
- "Product Type",
- "Transaction Type",
- "Transaction Code",
- "Independent Amount (%)",
- "RED",
- "Issuer Name",
- "Entity Matrix",
- "Definitions Type",
- "Swaption Expiration Date",
- "Strike Price",
- "Swaption Settlement Type",
- "Master Document Date",
- "OptionBuySellIndicator",
- "Clearing House",
- "Protection",
- "Swaption Quotation Rate Type",
- "Effective Date",
-]
-
-mtm_cds = [
- "Swap ID",
- "Allocation ID",
- "Description",
- "Broker Id",
- "DTCC CounterParty ID",
- "Trade ID",
- "Trade Date",
- "Effective Date",
- "Settle Date",
- "Maturity Date",
- "Account Abbreviation",
- "1st Leg Notional",
- "Currency Code",
- "1st Leg Rate",
- "Initial Payment",
- "Initial Payment Currency",
- "Original Issue Date",
- "Interest Payment Method Description",
- "First Payment Date",
- "Product Type",
- "Product Sub Type",
- "Transaction Type",
- "Protection",
- "Transaction Code",
- "Remaining Party",
- "DTCC Remaining CounterParty ID",
- "Independent Amount (%)",
- "Independent Amount ($)",
- "RED",
- "Issuer Name",
- "Settlement Amount",
- "Trader",
- "Executing Broker",
- "Dealer Trade ID",
- "Notes",
- "Parent Transaction Code",
- "Parent Trade Date",
- "Parent Notional",
- "Parent Currency Code",
- "Parent Net Amount",
- "Parent Effective Date",
- "Parent First Payment Date",
- "Parent Settle Date",
- "ComplianceHubAction",
- "DTCC Ineligible",
- "Master Document Date",
- "Master Document Version",
- "Include Contractual Supplement",
- "Contractual Supplement",
- "Supplement Date",
- "Entity Matrix",
- "Entity Matrix Date",
- "Modified Equity Delivery",
- "Calculation Agent Business Center",
- "Calculation Agent",
- "Attachment Point",
- "Exhaustion Point",
- "Strategy",
- "First Payment Period Accrual Start Date",
- "TieOut Ineligible",
- "Electronic Consent Ineligible",
- "External OMS ID",
- "Independent Amount Currency",
- "Independent Amount Payer",
- "Trade Revision",
- "Alternate Swap ID",
- "Alternate Trade ID",
- "Definitions Type",
-]
-mtm_trs = [
- "Swap ID ",
- "Allocation ID",
- "Description ",
- "Broker Id ",
- "DTCC CounterParty ID",
- "Trade ID ",
- "Trade Date ",
- "Effective Date",
- "Settle Date",
- "Maturity Date ",
- "Account Abbreviation ",
- "1st Leg Notional",
- "Currency Code ",
- "Initial Payment",
- "Initial Payment Currency",
- "Original Issue Date",
- "Interest Payment Method Description",
- "Product Type ",
- "Product Sub Type",
- "Transaction Type ",
- "Protection",
- "Transaction Code",
- "Remaining Party ",
- "DTCC Remaining CounterParty ID",
- "Independent Amount (%)",
- "Independent Amount ($)",
- "RED",
- "Issuer Name",
- "Settlement Amount",
- "Trader",
- "Dealer Trade ID",
- "Notes",
- "Parent Transaction Code",
- "Parent Trade Date",
- "Parent Notional",
- "Parent Currency Code",
- "Parent Net Amount",
- "Parent Effective Date",
- "Parent First Payment Date",
- "Parent Settle Date",
- "ComplianceHubAction",
- "DTCC Ineligible",
- "Master Document Date",
- "Master Document Type",
- "Master Document Version",
- "",
- "",
- "Annex Date",
- "Supplement Date",
- "Documentation Type",
- "Calculation Agent Business Center",
- "",
- "Strategy",
- "Electronic Consent Ineligible",
- "External OMS ID",
- "Traded Rate/Price",
- "Independent Amount Currency",
- "Independent Amount Payer",
- "Trade Revision",
- "Alternate Swap ID",
- "Alternate Trade ID",
- "Definitions Type",
- "Initial Fixing Amount",
- "2nd Leg Index",
- "2nd Leg Spread",
- "2nd Leg Initial Floating Rate",
-]
diff --git a/python/csv_headers/test.py b/python/csv_headers/test.py
deleted file mode 100644
index e578fa77..00000000
--- a/python/csv_headers/test.py
+++ /dev/null
@@ -1,136 +0,0 @@
-mtm_trs = [
- "Swap ID ",
- "Allocation ID",
- "Description ",
- "Broker Id ",
- "DTCC CounterParty ID",
- "Trade ID ",
- "Trade Date ",
- "Effective Date",
- "Settle Date",
- "Maturity Date ",
- "Account Abbreviation ",
- "1st Leg Notional",
- "Currency Code ",
- "Initial Payment",
- "Initial Payment Currency",
- "Original Issue Date",
- "Interest Payment Method Description",
- "Product Type ",
- "Product Sub Type",
- "Transaction Type ",
- "Protection",
- "Transaction Code",
- "Remaining Party ",
- "DTCC Remaining CounterParty ID",
- "Independent Amount (%)",
- "Independent Amount ($)",
- "RED",
- "Issuer Name",
- "Settlement Amount",
- "Trader",
- "Dealer Trade ID",
- "Notes",
- "Parent Transaction Code",
- "Parent Trade Date",
- "Parent Notional",
- "Parent Currency Code",
- "Parent Net Amount",
- "Parent Effective Date",
- "Parent First Payment Date",
- "Parent Settle Date",
- "ComplianceHubAction",
- "DTCC Ineligible",
- "Master Document Date",
- "Master Document Type",
- "Master Document Version",
- "",
- "",
- "Annex Date",
- "Supplement Date",
- "Documentation Type",
- "Calculation Agent Business Center",
- "",
- "Strategy",
- "Electronic Consent Ineligible",
- "External OMS ID",
- "Traded Rate/Price",
- "Independent Amount Currency",
- "Independent Amount Payer",
- "Trade Revision",
- "Alternate Swap ID",
- "Alternate Trade ID",
- "Definitions Type",
- "Initial Fixing Amount",
- "2nd Leg Index",
- "2nd Leg Spread",
- "2nd Leg Initial Floating Rate",
-]
-mtm_trs = [
- "Swap ID ",
- "Allocation ID",
- "Description ",
- "Broker Id ",
- "DTCC CounterParty ID",
- "Trade ID ",
- "Trade Date ",
- "Effective Date",
- "Settle Date",
- "Maturity Date ",
- "Account Abbreviation ",
- "1st Leg Notional",
- "Currency Code ",
- "Initial Payment",
- "Initial Payment Currency",
- "Original Issue Date",
- "Interest Payment Method Description",
- "Product Type ",
- "Product Sub Type",
- "Transaction Type ",
- "Protection",
- "Transaction Code",
- "Remaining Party ",
- "DTCC Remaining CounterParty ID",
- "Independent Amount (%)",
- "Independent Amount ($)",
- "RED",
- "Issuer Name",
- "Settlement Amount",
- "Trader",
- "Dealer Trade ID",
- "Notes",
- "Parent Transaction Code",
- "Parent Trade Date",
- "Parent Notional",
- "Parent Currency Code",
- "Parent Net Amount",
- "Parent Effective Date",
- "Parent First Payment Date",
- "Parent Settle Date",
- "ComplianceHubAction",
- "DTCC Ineligible",
- "Master Document Date",
- "Master Document Type",
- "Master Document Version",
- "",
- "",
- "Annex Date",
- "Supplement Date",
- "Documentation Type",
- "Calculation Agent Business Center",
- "",
- "Strategy",
- "Electronic Consent Ineligible",
- "External OMS ID",
- "Traded Rate/Price",
- "Independent Amount Currency",
- "Independent Amount Payer",
- "Trade Revision",
- "Alternate Swap ID",
- "Alternate Trade ID",
- "Definitions Type",
- "Initial Fixing Amount",
- "2nd Leg Index",
- "2nd Leg Spread",
- "2nd Leg Initial Floating Rate",
-]
diff --git a/python/custodian_wire.py b/python/custodian_wire.py
deleted file mode 100644
index a6fd5fba..00000000
--- a/python/custodian_wire.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from report_ops.wires import BowdstWire, NTWire
-import datetime
-
-if __name__ == "__main__":
- from serenitas.utils.exchange import ExchangeMessage
- import logging
- import argparse
-
- logger = logging.getLogger(__name__)
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "workdate",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=datetime.date.today(),
- help="working date",
- )
- args = parser.parse_args()
- for wire_report in (BowdstWire, NTWire):
- try:
- wire_report.to_db(args.workdate)
- except ValueError as e:
- logger.info(e)
diff --git a/python/exploration/dispersion.py b/python/exploration/dispersion.py
index e633264d..a2165754 100644
--- a/python/exploration/dispersion.py
+++ b/python/exploration/dispersion.py
@@ -5,10 +5,16 @@ import statsmodels.api as sm
import statsmodels.formula.api as smf
from serenitas.analytics.basket_index import MarkitBasketIndex
-from serenitas.analytics import CreditIndex
+from serenitas.analytics.api import CreditIndex
from scipy.special import logit, expit
from serenitas.utils.db import dbengine
+from sklearn.feature_selection import RFECV, RFE
+from sklearn.model_selection import train_test_split
+from sklearn.pipeline import make_pipeline
+from sklearn.preprocessing import PolynomialFeatures, PowerTransformer
+from sklearn.linear_model import LinearRegression
+
def get_corr_data(index_type, series, engine):
sql_str = (
@@ -106,11 +112,6 @@ def create_models(conn, df) -> (pd.DataFrame, float):
f.predict(bottom_stack)
)
- def aux(s):
- temp = s.values
- temp[-1] = 1 - temp[:-1].sum()
- return temp
-
df["predict"] = df.groupby(["index", "series", "date"])["predict"].transform(aux)
df = df.assign(
mispricing=(df.exp_percentage - df.predict)
@@ -148,11 +149,6 @@ def create_models_v2(conn, df, weights=None) -> (pd.DataFrame, float):
df.predict_tranche_loss * df.thickness / df.index_expected_loss
)
- def aux(s):
- temp = s.values
- temp[-1] = 1 - temp[:-1].sum()
- return temp
-
df["predict"] = df.groupby(["index", "series", "date"])["predict"].transform(aux)
df = df.assign(
mispricing=(df.exp_percentage - df.predict)
@@ -202,6 +198,84 @@ def create_separate_models(df):
return (calc, model)
+def aux(s):
+ temp = s.values
+ temp[-1] = 1 - temp[:-1].sum()
+ return temp
+
+
+def create_rfe_models(df, print_score=False):
+ # Takes the output of get_tranche_data
+ attach_max = df.index.get_level_values("attach").max()
+ bottom_stack = df[df.index.get_level_values("attach") != attach_max]
+ bottom_stack = bottom_stack[bottom_stack.tranche_loss_per > 0].dropna()
+
+ # prepare the variables
+ y = logit(bottom_stack["tranche_loss_per"])
+ X = bottom_stack[
+ ["index_duration", "index_basis", "att_moneyness", "det_moneyness", "gini"]
+ ]
+
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
+
+ pipe_rfe = make_pipeline(
+ PowerTransformer(),
+ PolynomialFeatures(interaction_only=True),
+ RFECV(estimator=LinearRegression(), cv=10, min_features_to_select=1),
+ )
+
+ pipe_rfe.fit(X_train, y_train)
+ pipe_rfe.steps[-1] = (
+ "rfe",
+ RFE(
+ estimator=LinearRegression(),
+ n_features_to_select=pipe_rfe["rfecv"].n_features_,
+ ),
+ )
+ model = pipe_rfe.fit(X_train, y_train)
+
+ df = pd.merge(
+ df,
+ pd.DataFrame(
+ expit(model.predict(X)), index=X.index, columns=["predict_tranche_loss"]
+ ),
+ how="left",
+ left_index=True,
+ right_index=True,
+ )
+
+ df.loc[
+ df.index.get_level_values("attach") != attach_max,
+ "predict_tranche_loss_per_index",
+ ] = (
+ df.predict_tranche_loss * df.thickness / df.index_expected_loss
+ )
+
+ df["predict_tranche_loss_per_index"] = df.groupby(["index", "series", "date"])[
+ "predict_tranche_loss_per_index"
+ ].transform(aux)
+ df = df.assign(
+ mispricing=(df.exp_percentage - df.predict_tranche_loss_per_index)
+ * df.index_expected_loss
+ / (df.detach_adj - df.attach_adj)
+ )
+
+ if print_score:
+ index_type = df.index[0][1]
+ print(index_type, " num features: ", model.feature_names_in_)
+ print(
+ index_type,
+ " Chosen columns: ",
+ np.array(model["polynomialfeatures"].get_feature_names_out(X.columns))[
+ model["rfe"].support_
+ ],
+ )
+ print(index_type, " Training Score: ", model.score(X_train, y_train))
+ print(index_type, " Testing Score: ", model.score(X_test, y_test))
+
+ return (df, model)
+
+
if __name__ == "__main__":
index_type = "HY"
series = 29
diff --git a/python/baml_fcm_fx.py b/python/fcm_fx.py
index 1c7984bb..2e470e67 100644
--- a/python/baml_fcm_fx.py
+++ b/python/fcm_fx.py
@@ -5,13 +5,19 @@ from report_ops.misc import _recipients, _cc_recipients
from serenitas.utils.exchange import ExchangeMessage
from exchangelib import HTMLBody
-_fcm_alias = {"V0NSCLMSPT": "6MZ20049"}
+_account_alias = {"V0NSCLMSPT": "6MZ20049"}
def email_fcm(em, d, fund):
- fcm_account = _fcm_alias.get(d["cash_account"], d["cash_account"])
+ account = _account_alias.get(d["cash_account"], d["cash_account"])
+ match d["cp_code"]:
+ case "BAMSNY":
+ recipients = _recipients["BAML_FCM"]
+ case "GOLDNY":
+ recipients = _recipients["GS_FCM"]
+ additional_instructions = f"Could you please instruct moves from DCS 057575201 to Futures {account} to settle this trade? "
em.send_email(
- f"FX Details: {fcm_account} Trade Date: {d['trade_date']}",
+ f"FX Details: {account} Trade Date: {d['trade_date']}",
HTMLBody(
f"""
<html>
@@ -22,11 +28,11 @@ def email_fcm(em, d, fund):
</style>
</head>
<body>
- Hello,<br><br>Please see below details for an FX Spot Trade we did with the desk today for account {fcm_account}. Please let me know if you need more information.<br><br>{to_tabulate(d)}
+ Hello,<br><br>Please see below details for an FX Spot Trade we did with the desk today for account {account}. {additional_instructions if additional_instructions else ""}Please let me know if you need more information.<br><br>{to_tabulate(d)}
</body>
</html>"""
),
- to_recipients=_recipients["BAML_FCM"],
+ to_recipients=recipients,
cc_recipients=_cc_recipients[fund],
)
@@ -36,9 +42,9 @@ def to_tabulate(d):
key1, key2 = "buy", "sell"
else:
key1, key2 = "sell", "buy"
- fcm_account = _fcm_alias.get(d["cash_account"], d["cash_account"])
+ account = _account_alias.get(d["cash_account"], d["cash_account"])
line = [
- fcm_account,
+ account,
d[f"{key1}_currency"],
d[f"{key1}_amount"],
d[f"{key2}_currency"],
@@ -71,9 +77,9 @@ def to_tabulate(d):
def main(trade_date, conn, fund, em):
with conn.cursor() as c:
c.execute(
- "SELECT spots.cash_account, buy_currency, sell_currency, buy_amount, sell_amount, spot_rate, settle_date, trade_date FROM spots "
+ "SELECT spots.cash_account, buy_currency, sell_currency, buy_amount, sell_amount, spot_rate, settle_date, trade_date, spots.cp_code FROM spots "
"LEFT JOIN accounts2 USING (cash_account) "
- "WHERE account_type='Fcm' AND spots.cp_code='BAMSNY' AND spots.trade_date =%s AND spots.fund=%s",
+ "WHERE account_type in ('Fcm', 'Future') AND spots.cp_code in ('BAMSNY', 'GOLDNY') AND spots.trade_date =%s AND spots.fund=%s",
(trade_date, fund),
)
for rec in c:
@@ -98,5 +104,6 @@ if __name__ == "__main__":
for fund in (
"SERCGMAST",
"ISOSEL",
+ "BOWDST",
):
main(args.date, conn, fund, em)
diff --git a/python/innocap_file_transfer.py b/python/innocap_file_transfer.py
index db555a66..7e39a264 100644
--- a/python/innocap_file_transfer.py
+++ b/python/innocap_file_transfer.py
@@ -2,8 +2,9 @@ from io import BytesIO
from serenitas.utils.env import DAILY_DIR
import datetime
from csv_headers.citco import GIL, GTL
-from serenitas.utils.remote import SftpClient
+from serenitas.utils.remote import Client
from serenitas.analytics.dates import prev_business_day
+from report_ops.sma import build_position_file
def concat_csv(file_type, date):
@@ -23,16 +24,28 @@ def concat_csv(file_type, date):
return buf.getvalue(), fname
-def main(date, upload):
+def upload_citco_files(date, upload):
for file_type in ("trade", "instrument"):
buf, fname = concat_csv(file_type, date)
dest = DAILY_DIR / str(date) / fname
dest.write_bytes(buf)
if upload:
- innocap_sftp = SftpClient.from_creds("innocap", folder="Innocap")
+ innocap_sftp = Client.from_creds("innocap", folder="Innocap")
innocap_sftp.put(buf, fname)
+def upload_position_files(date, fund, upload):
+ buf, dest = build_position_file(
+ date,
+ fund,
+ )
+ if upload:
+ client = Client.from_creds("innocap", folder="Innocap")
+ client.put(buf, dest.name)
+ client = Client.from_creds("citco")
+ client.put(buf, dest.name)
+
+
if __name__ == "__main__":
import argparse
@@ -53,4 +66,5 @@ if __name__ == "__main__":
)
args = parser.parse_args()
- main(args.cob, not args.no_upload)
+ upload_citco_files(args.cob, not args.no_upload)
+ upload_position_files(args.cob, "ISOSEL", not args.no_upload)
diff --git a/python/isosel_reports.py b/python/isosel_reports.py
deleted file mode 100644
index 50c243aa..00000000
--- a/python/isosel_reports.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from report_ops.admin import AccruedReport, AllReport
-import datetime
-from serenitas.analytics.dates import prev_business_day
-import logging
-
-logger = logging.getLogger(__name__)
-if __name__ == "__main__":
- import argparse
-
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "date",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=prev_business_day(datetime.date.today()),
- help="report date",
- )
- args = parser.parse_args()
- for report_cls in (
- AccruedReport,
- AllReport,
- ):
- try:
- report_cls.to_db(args.date)
- except ValueError as e:
- logger.info(e)
diff --git a/python/load_globeop_report.py b/python/load_globeop_report.py
index 519ecaa0..678046ea 100644
--- a/python/load_globeop_report.py
+++ b/python/load_globeop_report.py
@@ -33,6 +33,7 @@ def read_valuation_report(f):
df["PeriodEndDate"] = date - bus_day
df["row"] = df.index
df["Fund"] = df.Fund.str.replace("BOS_PAT_BOWDOIN", "BOWDST")
+ df["CounterPartyCode"] = df["CounterPartyCode"].str.replace("BTIGNY", "BTIG")
if "AccountingPeriod" in df:
del df["AccountingPeriod"]
if "CounterParty" in df:
@@ -154,7 +155,7 @@ def read_cds_report(f):
df.loc[df.strategy == "SERCGMAST__MBSCDS", "strategy"] = "MBSCDS"
df.strategy = df.strategy.str.replace("SER_", "")
df["buy/sell"] = df["buy/sell"].astype("category")
- df["buy/sell"].cat.categories = ["Buyer", "Seller"]
+ df["buy/sell"] = df["buy/sell"].cat.rename_categories(["Buyer", "Seller"])
del df["independent_%"]
df2 = df2.rename(columns={"independent_%": "independent_perc"})
df.prime_broker = df.prime_broker.where(df.prime_broker != "NONE")
diff --git a/python/manual_bond_upload.py b/python/manual_bond_upload.py
new file mode 100644
index 00000000..3b7b6e87
--- /dev/null
+++ b/python/manual_bond_upload.py
@@ -0,0 +1,14 @@
+from serenitas.ops.trade_dataclasses import BondDeal
+from serenitas.utils.db import dbconn
+from serenitas.ops.funds import Serenitas
+
+
+conn = dbconn("dawndb")
+with conn.cursor() as c:
+ c.execute(
+ "SELECT * from bond_trades WHERE settle_date > '2022-11-15' AND account= 'BAC'"
+ )
+ for row in c:
+ trade = BondDeal.from_dict(**row._asdict(), scaled=True)
+ Serenitas.staging_queue.append(trade.to_globeop("UPDATE"))
+ Serenitas.build_buffer("bond")
diff --git a/python/notebooks/dispersion_tranche_model.ipynb b/python/notebooks/dispersion_tranche_model.ipynb
index 46eb348c..56255a42 100644
--- a/python/notebooks/dispersion_tranche_model.ipynb
+++ b/python/notebooks/dispersion_tranche_model.ipynb
@@ -18,7 +18,7 @@
"import serenitas.analytics.tranche_data as tdata\n",
"\n",
"from serenitas.analytics.basket_index import MarkitBasketIndex\n",
- "from serenitas.analytics import on_the_run\n",
+ "from serenitas.analytics.index_data import on_the_run\n",
"from statsmodels.graphics.regressionplots import plot_fit\n",
"from scipy.special import logit, expit\n",
"from serenitas.utils.db import dbengine, dbconn\n",
@@ -52,117 +52,18 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "#Section 1----------------------------------------------------\n",
- "#index basis doesn't work with HY (opposite reaction to what I think)\n",
- "#RFE\n",
- "drop_variable_list = ['tranche_loss_per', 'tranche_id', 'index_price', 'detach', 'corr_at_detach', \n",
- " 'corr01', 'exp_percentage', 'indexfactor', 'duration', 'index_expected_loss',\n",
- " 'index_theta', 'delta', 'expected_loss', 'attach_adj', 'detach_adj',\n",
- " 'cumulativeloss', \n",
- " 'forward_delta', \n",
- " #Comment out to include\n",
- " # 'index_duration',\n",
- " 'thickness',\n",
- " 'moneyness',\n",
- " # 'index_basis',\n",
- " # 'att_moneyness', \n",
- " # 'det_moneyness',\n",
- " 'dispersion',\n",
- " # 'gini', \n",
- " 'gamma',\n",
- " 'theta',\n",
- " 'index_theta'\n",
- " ]"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
"metadata": {
"tags": []
},
"outputs": [],
"source": [
- "def run_rfe(index_type):\n",
- " risk = disp.get_tranche_data(dbconn(\"serenitasdb\"), index_type)\n",
- " attach_max = risk.index.get_level_values(\"attach\").max()\n",
- " bottom_stack = risk[risk.index.get_level_values(\"attach\") != attach_max]\n",
- " bottom_stack = bottom_stack[bottom_stack.tranche_loss_per > 0].dropna()\n",
- "\n",
- " #prepare the variables\n",
- " y = logit(bottom_stack['tranche_loss_per'])\n",
- " X = bottom_stack.drop(drop_variable_list, axis=1)\n",
- " \n",
- " X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)\n",
- " \n",
- " pipe_rfe = make_pipeline (PowerTransformer(),\n",
- " #PolynomialFeatures(degree=2),\n",
- " PolynomialFeatures(interaction_only=True),\n",
- " RFECV(estimator=LinearRegression(), \n",
- " cv=10,\n",
- " min_features_to_select=1))\n",
- " \n",
- " pipe_rfe.fit(X_train, y_train)\n",
- " n_features_to_select = pipe_rfe['rfecv'].n_features_\n",
- " pipe_rfe.steps[-1]= ('rfe', RFE(estimator=LinearRegression(), n_features_to_select = n_features_to_select))\n",
- " model = pipe_rfe.fit(X_train, y_train)\n",
- " \n",
- " #RandomForest\n",
- " #params = {'n_estimators': 100,\n",
- " # 'min_samples_split': 3,\n",
- " # 'verbose':1,\n",
- " # 'n_jobs': -1}\n",
- " #randomforest = RandomForestRegressor(**params)\n",
- " \n",
- " \n",
- " #gradientboost\n",
- " #params = {'n_estimators': 500,\n",
- " # 'max_depth': 10,\n",
- " # 'min_samples_split': 3,\n",
- " # 'learning_rate': 0.01,\n",
- " # 'loss': 'huber',\n",
- " # 'verbose':1}\n",
- " #gb = GradientBoostingRegressor(**params).fit(X_train, y_train)\n",
- " \n",
- " #model = VotingRegressor([('rf', model), ('gb', gb)]).fit(X_train, y_train)\n",
- " #model = VotingRegressor([('lr', pipe_rfe)]).fit(X, logit(y))\n",
- "\n",
- " df = pd.merge(risk, \n",
- " pd.DataFrame(expit(model.predict(X)), \n",
- " index=X.index, \n",
- " columns=['predict_tranche_loss']),\n",
- " how='left', left_index=True, right_index=True)\n",
- "\n",
- " df.loc[df.index.get_level_values(\"attach\") != attach_max, \"predict_tranche_loss_per_index\"] = (\n",
- " df.predict_tranche_loss * df.thickness / df.index_expected_loss\n",
- " )\n",
- "\n",
- " def aux(s):\n",
- " temp = s.values\n",
- " temp[-1] = 1 - temp[:-1].sum()\n",
- " return temp\n",
- "\n",
- " df[\"predict_tranche_loss_per_index\"] = df.groupby([\"index\", \"series\", \"date\"])[\"predict_tranche_loss_per_index\"].transform(aux)\n",
- " df = df.assign(\n",
- " mispricing=(df.exp_percentage - df.predict_tranche_loss_per_index)\n",
- " * df.index_expected_loss\n",
- " / (df.detach_adj - df.attach_adj)\n",
- " )\n",
- " rfe_result = pipe_rfe\n",
- " print(index_type, \" num features: \", n_features_to_select)\n",
- " print(index_type, \" Chosen columns: \", np.array(rfe_result['polynomialfeatures'].get_feature_names_out(X.columns))[rfe_result['rfe'].support_])\n",
- " print(index_type, \" Training Score: \", model.score(X_train, y_train))\n",
- " print(index_type, \" Testing Score: \", model.score(X_test, y_test))\n",
- " \n",
- " return model, df, X\n",
- "\n",
- "gini_model, gini_results, gini_X = {}, {}, {}\n",
+ "#Run RFE model\n",
+ "gini_model, gini_results = {}, {}\n",
"fieldlist = ['exp_percentage','dispersion','gini','tranche_loss_per','mispricing']\n",
"for index_type in ['HY', 'IG', 'EU', 'XO']:\n",
- " gini_model[index_type], gini_results[index_type], gini_X[index_type] = run_rfe(index_type)\n",
+ " risk = disp.get_tranche_data(dbconn(\"serenitasdb\"), index_type)\n",
+ " risk = risk[risk.index_duration > 1] #filter out the short duration ones\n",
+ " gini_results[index_type], gini_model[index_type] = disp.create_rfe_models(risk)\n",
" gini_results[index_type][fieldlist].to_csv('/home/serenitas/edwin/DispersionModel/' + index_type + '_results_rfecv.csv')"
]
},
@@ -179,20 +80,27 @@
"for index_type in ['HY', 'IG', 'EU', 'XO']:\n",
" plots = {}\n",
" tranche_attach = []\n",
- "\n",
- " for i, X in gini_X[index_type].groupby('attach'):\n",
+ " \n",
+ " res = gini_results[index_type]\n",
+ " mod = gini_model[index_type]\n",
+ " \n",
+ " Xs = res[mod.feature_names_in_]\n",
+ " \n",
+ " for i, X in Xs.groupby('attach'):\n",
" tranche_attach.append(X.index[0][5])\n",
" for var in X.columns:\n",
" bins = np.linspace(X[var].min(), X[var].max(),num=steps)\n",
" testing_df = pd.DataFrame(bins, columns=[var])\n",
" for var_1 in X.drop(var, axis=1).columns:\n",
" testing_df = pd.concat([testing_df, pd.Series(np.repeat(X.iloc[-1][var_1], steps),name=var_1)], axis=1)\n",
- " plots[i, var] = pd.Series(expit(gini_model[index_type].predict(testing_df[X.columns])), index=testing_df[var])\n",
+ " plots[i, var] = pd.Series(expit(mod.predict(testing_df[X.columns])), index=testing_df[var])\n",
"\n",
+ " #breakpoint()\n",
+ " \n",
" sensitivies = pd.concat(plots, names=['attach', 'shock', 'value'])\n",
" sensitivies.to_csv('/home/serenitas/edwin/DispersionModel/' + index_type + '_sensitivies.csv')\n",
"\n",
- " fig, axes = plt.subplots(nrows=3, ncols=len(X.columns), figsize = (20,10))\n",
+ " fig, axes = plt.subplots(nrows=4, ncols=len(X.columns), figsize = (20,10))\n",
" for i, p in enumerate(plots):\n",
" x_loc = int(i/len(X.columns))\n",
" y_loc = i % len(X.columns)\n",
@@ -206,7 +114,7 @@
" rotation=90)\n",
" fig.savefig(\"/home/serenitas/edwin/PythonGraphs/dispersion_model.png\", bbox_inches='tight')\n",
"\n",
- " fig_1, axes_1 = plt.subplots(nrows=3, ncols=1, figsize = (15,8))\n",
+ " fig_1, axes_1 = plt.subplots(nrows=4, ncols=1, figsize = (15,8))\n",
" for i, p in enumerate(plots):\n",
" x_loc = int(i/len(X.columns))\n",
" plots[p].plot(ax=axes_1[x_loc], label=p[1], xlabel=\"\", legend=True)\n",
@@ -234,7 +142,9 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "tags": []
+ },
"outputs": [],
"source": [
"#Section 3----------------------------------------------------\n",
@@ -259,7 +169,9 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {},
+ "metadata": {
+ "tags": []
+ },
"outputs": [],
"source": [
"#plot the residuals\n",
@@ -289,69 +201,13 @@
"data = risk[['gini', 'index_duration', 'index_expected_loss']]\n",
"ols_model = smf.ols(\"gini ~ np.log(index_duration) + np.log(index_expected_loss)\", data=data).fit()\n"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
"kernelspec": {
- "display_name": "Python 3.9.1 64-bit",
+ "display_name": "Python 3 (ipykernel)",
"language": "python",
- "name": "python39164bit6ddd573894c04d6a858a9a58880cc9d4"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -363,7 +219,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.10.2"
+ "version": "3.10.8"
}
},
"nbformat": 4,
diff --git a/python/pnl_explain.py b/python/pnl_explain.py
index 1df6983b..18c4c5fc 100644
--- a/python/pnl_explain.py
+++ b/python/pnl_explain.py
@@ -159,7 +159,7 @@ def get_tranche_pv(
# add terminations
with conn.cursor() as c:
c.execute(
- "SELECT termination_fee, currency "
+ "SELECT termination_fee, cds.currency "
"FROM terminations JOIN cds USING (dealid) "
"WHERE termination_date=%s AND dealid LIKE 'SCCDS%%' AND fund=%s",
(prev_day, fund),
@@ -233,7 +233,7 @@ def get_tranche_pv2(
df = df[["clean_nav", "accrued"]]
with conn.cursor() as c:
c.execute(
- "SELECT termination_date AS date, cds.id, folder, currency, "
+ "SELECT termination_date AS date, cds.id, folder, cds.currency, "
"termination_fee AS principal "
"FROM terminations "
"JOIN cds USING (dealid) "
diff --git a/python/position_file_bowdst.py b/python/position_file_bowdst.py
index 7e2ecc07..36bc04e1 100644
--- a/python/position_file_bowdst.py
+++ b/python/position_file_bowdst.py
@@ -1,224 +1,52 @@
-import datetime
-import csv
-from io import StringIO
-from serenitas.utils.misc import rename_keys
-from serenitas.utils.remote import SftpClient
-from serenitas.utils.env import DAILY_DIR
-from pandas.tseries.offsets import MonthEnd
+from report_ops.sma import build_position_file
+import argparse
+from serenitas.utils.remote import Client
from serenitas.utils.exchange import ExchangeMessage, FileAttachment
-from csv_headers.globeop_upload import POSITION_HEADERS
+from report_ops.misc import _recipients, _cc_recipients
+import datetime
from serenitas.analytics.dates import prev_business_day
-_otc_queries = {
- "Tranche": "SELECT trb.trade_id, trb.serenitas_clean_nav + trb.serenitas_accrued as mtm, trb.notional * trb.tranche_factor as active_notional, cds.*, COALESCE(nextredindexcode, security_id) AS redindexcode FROM tranche_risk_bowdst trb left join cds on trade_id=id LEFT JOIN index_version_markit ivm ON security_id=redindexcode WHERE date=%s;",
- "CDXSwaption": "SELECT abs(spr.notional) AS active_notional, spr.serenitas_nav as nav, swaptions.*, index_version_markit.annexdate FROM list_swaption_positions_and_risks(%s, 'BOWDST') spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
- "IRSwaption": "SELECT abs(spr.notional) AS active_notional, spr.nav, swaptions.*, index_version_markit.effectivedate FROM list_ir_swaption_positions(%s, 'BOWDST') spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
- "CDX": "SELECT cds.*, ivm.effectivedate FROM list_cds_marks(%s, null, 'BOWDST') cds LEFT JOIN index_version_markit ivm ON security_id=redindexcode;",
-}
-
-
-def build_line(obj, asset_type):
- return [obj.get(h, None) for h in POSITION_HEADERS[asset_type]]
-
-
-def process_upload(positions, upload):
- attachments = []
- if upload:
- sftp = SftpClient.from_creds("hm_globeop", folder="incoming")
- for asset_type, trades in positions.items():
- buf = StringIO()
- csvwriter = csv.writer(buf)
- csvwriter.writerow(POSITION_HEADERS[asset_type])
- csvwriter.writerows(build_line(trade, asset_type) for trade in trades)
- buf = buf.getvalue().encode()
- timestamp = datetime.datetime.now()
- fname = f"HEDGEMARK.POSITION.BOS_PAT_BOWDOIN.{timestamp:%Y%m%d.%H%M%S}.{asset_type.capitalize()}Deal.PositionsAsOf{args.date}.csv"
- if upload:
- sftp.put(buf, fname)
- base_dir = DAILY_DIR / f"{timestamp:%Y-%m-%d}"
- base_dir.mkdir(exist_ok=True, parents=True)
- dest = base_dir / fname
- dest.write_bytes(buf)
- attachments.append(FileAttachment(name=fname, content=buf))
+def main(cob, fund, upload):
+ buf, dest = build_position_file(
+ cob,
+ fund,
+ )
if upload:
+ client = Client.from_creds("hm_globeop")
+ client.put(buf, dest.name)
em = ExchangeMessage()
- recipients = (
- "hm-operations@bnymellon.com",
- "hedgemark.lmcg.ops@sscinc.com",
- "Hedgemark.OTC@sscinc.com",
- "catherine.porter@bnymellon.com",
- "shkumar@sscinc.com",
- )
- cc_recipients = ("bowdoin-ops@lmcg.com",)
- subject = f"Position_files for Bowdoin Street as of {args.date}"
- body = f"Please see monthend positions for Bowdoin Street as of {args.date}. They have been uploaded to the SFTP as well."
em.send_email(
- subject, body, recipients, cc_recipients=cc_recipients, attach=attachments
- )
-
-
-def positions_bond(conn, date):
- with conn.cursor() as c:
- c.execute("SELECT * FROM risk_positions(%s, null, 'BOWDST') ", (date,))
- for row in c:
- obj = row._asdict()
- rename_keys(
- obj,
- {
- "identifier": "CUSIP",
- "description": "SecurityDescription",
- "notional": "Position",
- "price": "MarketPrice",
- "local_market_value": "Local Market Value",
- "usd_market_value": "Base Market Value",
- },
- )
- try:
- obj["Fx Rate"] = obj["Local Market Value"] / obj["Base Market Value"]
- except ZeroDivisionError:
- obj["Fx Rate"] = 1
- obj["AccountNumber"] = "319478"
- obj["Prime Broker"] = "BONY"
- obj["COB Date"] = date
- obj["Currency"] = "USD"
- obj["SecurityType"] = "Bond"
- obj["CurrentFace"] = obj["Position"] * obj["factor"]
- yield obj
-
-
-def positions_future(conn, date):
- with conn.cursor() as c:
- c.execute(
- "WITH tmp AS (SELECT bbg_ticker, fund, security_desc, currency, maturity, sum(quantity * (2*buysell::int-1)) OVER (PARTITION BY bbg_ticker, fund, security_desc, currency, maturity) notional FROM futures "
- "WHERE fund='BOWDST' AND trade_date <= %s) "
- "SELECT bbg_ticker, notional, code AS cp_code, cash_account, security_desc, currency, maturity FROM tmp LEFT JOIN accounts USING (fund) WHERE tmp.notional != 0 AND account_type='Future';",
- (date,),
+ subject=f"Position_files for Bowdoin Street as of {cob}",
+ body=f"Please see monthend positions for Bowdoin Street as of {cob}. They have been uploaded to the SFTP as well.",
+ to_recipients=_cc_recipients[fund],
+ cc_recipients=_cc_recipients[fund],
+ reply_to=_cc_recipients[fund],
+ attach=[FileAttachment(name=dest.name, content=buf)],
)
- for row in c:
- obj = row._asdict()
- rename_keys(
- obj,
- {
- "bbg_ticker": "BBGTicker",
- "notional": "Quantity",
- "cp_code": "Prime Broker",
- "cash_account": "AccountNumber",
- "security_desc": "SecurityDescription",
- "currency": "Currency",
- "maturity": "MaturityDate",
- },
- )
- obj["COB Date"] = date
- obj["SecurityType"] = "Futures"
- yield obj
-def _otc_serialize(obj, product_type, date):
- rename_keys(
- obj,
- {
- "dealid": "Unique Deal ID",
- "cp_code": "Counterparty",
- "currency": "DealCurrencyA",
- "active_notional": "NotionalA",
- "fixed_rate": "FixedRate",
- "trade_date": "Start Date",
- "effective_date": "EffectiveDate",
- "maturity": "Maturity Date",
- "security_id": "Underlying (ISIN / CUSP / RED CODES)",
- "security_desc": "Underlying Desc",
- "mtm": "MTM Valuation",
- "strike": "Strike",
- "annexdate": "EffectiveDate",
- "expiration_date": "Underlying Maturity",
- "nav": "MTM Valuation",
- },
- )
- data = {
- "Client Name": "HEDGEMARK",
- "Fund Name": "BOS_PAT_BOWDOIN",
- "Product Type": "Credit Index Tranche",
- "MTM Currency": "USD",
- "COB Date": date,
- }
- obj.update(data)
- if product_type == "Tranche":
- obj["Underlying (ISIN / CUSP / RED CODES)"] = obj["redindexcode"]
- obj["Product Type"] = "Credit Index Tranche"
- obj["TransactionIndicator (Buy/Sell)"] = (
- "B" if obj["protection"] == "Buyer" else "S"
- )
- elif product_type in ("CDXSwaption", "IRSwaption"):
- obj["Product Type"] = (
- "CD Swaption" if product_type == "CDXSwaption" else "Swaption"
- )
- obj["TransactionIndicator (Buy/Sell)"] = "B" if obj["buysell"] else "S"
- obj["PutCall Indicator (Call/Put)"] = (
- "P" if obj["option_type"] == "PAYER" else "C"
- )
- obj["Exercise Type"] = "European"
- elif product_type == "CDX":
- obj["Product Type"] = "Credit Index"
- obj["Counterparty"] = "GS"
- obj["Unique Deal ID"] = obj[
- "Underlying (ISIN / CUSP / RED CODES)"
- ] # Different from rest, we will override
- obj["TransactionIndicator (Buy/Sell)"] = "B" if obj["notional"] > 0 else "S"
- obj["DealCurrencyA"] = "EUR" if obj["index"] in ("EU", "XO") else "USD"
- obj["NotionalA"] = abs(obj["notional"]) * obj["factor"]
- obj["Start Date"] = date
- obj["MTM Valuation"] = obj["clean_nav"] + obj["accrued"]
- obj["Clearing House Name"] = "ICE"
- obj["FixedRate"] = obj["coupon"] * 100
- obj["Effective Date"] = obj["effectivedate"]
- return obj
-
-
-def positions_otc(conn, date):
- with conn.cursor() as c:
- for product_type, sql_query in _otc_queries.items():
- c.execute(sql_query, (date,))
- for row in c:
- yield _otc_serialize(row._asdict(), product_type, date)
-
-
-if __name__ == "__main__":
- import argparse
- from serenitas.utils.db import dbconn
-
- parser = argparse.ArgumentParser(
- description="Generate position files for Bowdoin Street"
- )
- parser.add_argument(
- "date",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=prev_business_day((datetime.date.today().replace(day=1))),
- )
- parser.add_argument(
- "--product",
- nargs="+",
- choices=["bond", "future", "otc"],
- default=["bond", "future", "otc"],
- help="list of products to generate position files for",
- )
- parser.add_argument(
- "--no-upload",
- "-n",
- action="store_true",
- default=False,
- help="uploads to globeop",
- )
- args = parser.parse_args()
- conn = dbconn("dawndb")
- positions = {
- p: list(globals()[f"positions_{p}"](conn, args.date)) for p in args.product
- }
- if (
- not prev_business_day(datetime.date.today()) == args.date and not args.no_upload
- ): # We only want to upload if the previous business day was monthend
- pass
- else:
- process_upload(positions, not args.no_upload)
+parser = argparse.ArgumentParser(
+ description="Generate position files for Bowdoin Street"
+)
+parser.add_argument(
+ "date",
+ nargs="?",
+ type=datetime.date.fromisoformat,
+ default=prev_business_day((datetime.date.today().replace(day=1))),
+)
+parser.add_argument(
+ "--no-upload",
+ "-n",
+ action="store_true",
+ default=False,
+ help="uploads to globeop",
+)
+args = parser.parse_args()
+if (
+ not prev_business_day(datetime.date.today()) == args.date and not args.no_upload
+): # We only want to upload if the previous business day was monthend
+ pass
+else:
+ main(args.date, "BOWDST", not args.no_upload)
diff --git a/python/position_file_isosel.py b/python/position_file_isosel.py
deleted file mode 100644
index f653aed2..00000000
--- a/python/position_file_isosel.py
+++ /dev/null
@@ -1,227 +0,0 @@
-import datetime
-import csv
-from io import StringIO
-from serenitas.utils.misc import rename_keys
-from serenitas.utils.remote import SftpClient
-from serenitas.utils.env import DAILY_DIR
-from pandas.tseries.offsets import MonthEnd
-from serenitas.utils.exchange import ExchangeMessage, FileAttachment
-from serenitas.analytics.dates import prev_business_day
-
-
-def process_upload(trades, upload):
- buf = StringIO()
- csvwriter = csv.writer(buf)
- csvwriter.writerow(ISOSEL_HEADERS)
- csvwriter.writerows(build_line(trade) for trade in trades)
- buf = buf.getvalue().encode()
- fname = f"Innocap_ISOSEL_positions_{args.date}.csv"
- if upload:
- innocap_sftp = SftpClient.from_creds("innocap", folder="Innocap")
- innocap_sftp.put(buf, fname)
- citco_sftp = SftpClient.from_creds("citco", folder="incoming")
- citco_sftp.put(buf, fname)
- base_dir = DAILY_DIR / str(datetime.date.today())
- base_dir.mkdir(exist_ok=True, parents=True)
- dest = base_dir / fname
- dest.write_bytes(buf)
- return fname, buf
-
-
-def build_line(obj):
- return [obj.get(h, None) for h in ISOSEL_HEADERS]
-
-
-ISOSEL_HEADERS = [
- "Client Name",
- "Fund Name",
- "Counterparty",
- "AccountNumber",
- "COB Date",
- "SecurityDescription",
- "Prime Broker",
- "Product Type",
- "Unique Deal ID",
- "TransactionIndicator (Buy/Sell)",
- "PutCall Indicator (Call/Put)",
- "CapFloorIndicator",
- "CurrencyPair",
- "DealCurrencyA",
- "DealCurrencyB",
- "NotionalA",
- "NotionalB",
- "OriginalPrice",
- "Strike",
- "FixedRate",
- "Quantity",
- "Start Date",
- "Effective Date",
- "Maturity Date",
- "Underlying Maturity",
- "RecPayFixed",
- "Underlying (ISIN / CUSP / RED CODES)",
- "Underlying Desc",
- "Exercise Type",
- "MTM Currency",
- "MTM Valuation",
- "MarketPrice",
- "COB Date",
- "Clearing House Name",
-]
-_sql_query = {
- "bond": "SELECT * FROM risk_positions(%s, null, 'ISOSEL') ",
- "future": (
- "WITH tmp AS (SELECT bbg_ticker, fund, security_desc, currency, maturity, sum(quantity * (2*buysell::int-1)) OVER (PARTITION BY bbg_ticker, fund, security_desc, currency, maturity) notional FROM futures "
- "WHERE fund='ISOSEL' AND trade_date <= %s) "
- "SELECT bbg_ticker, notional, code AS cp_code, cash_account, security_desc, currency, maturity FROM tmp LEFT JOIN accounts USING (fund) WHERE tmp.notional != 0 AND account_type='Future';"
- ),
- "tranche": "SELECT trb.trade_id, trb.serenitas_clean_nav + trb.serenitas_accrued as mtm, trb.notional * trb.tranche_factor as active_notional, cds.* FROM tranche_risk_isosel trb left join cds on trade_id=id WHERE date=%s",
- "cdx_swaption": "SELECT abs(spr.notional) AS active_notional, spr.serenitas_nav, swaptions.*, index_version_markit.annexdate FROM list_swaption_positions_and_risks(%s, 'ISOSEL') spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
- "ir_swaption": "SELECT abs(spr.notional) AS active_notional, spr.nav as serenitas_nav, swaptions.*, index_version_markit.effectivedate FROM list_ir_swaption_positions(%s, 'ISOSEL') spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
- "cdx": "SELECT cds.*, ivm.effectivedate FROM list_cds_marks(%s, null, 'ISOSEL') cds LEFT JOIN index_version_markit ivm ON security_id=redindexcode;",
-}
-
-
-def _base_attrs(asset_class, date, obj):
- obj["Client Name"] = "INNOCAP"
- obj["Fund Name"] = "ISOSEL"
- obj["COB Date"] = date
- obj["Product Type"] = asset_class
- obj["MTM Currency"] = "USD"
- match asset_class:
- case "bond":
- rename_keys(
- obj,
- {
- "identifier": "Underlying (ISIN / CUSP / RED CODES)",
- "description": "SecurityDescription",
- "notional": "NotionalA",
- "price": "MarketPrice",
- "local_market_value": "Local Market Value",
- "usd_market_value": "MTM Valuation",
- },
- )
- obj["Account Number"] = "NT"
- obj["Prime Broker"] = "NT"
- obj["DealCurrencyA"] = "USD"
- case "future":
- rename_keys(
- obj,
- {
- "bbg_ticker": "Underlying (ISIN / CUSP / RED CODES)",
- "notional": "NotionalA",
- "cp_code": "Prime Broker",
- "cash_account": "AccountNumber",
- "security_desc": "SecurityDescription",
- "currency": "DealCurrencyA",
- "maturity": "MaturityDate",
- },
- )
- case "tranche":
- obj["TransactionIndicator (Buy/Sell)"] = (
- "B" if obj["protection"] == "Buyer" else "S"
- )
- rename_keys(
- obj,
- {
- "dealid": "Unique Deal ID",
- "cp_code": "Counterparty",
- "currency": "DealCurrencyA",
- "active_notional": "NotionalA",
- "fixed_rate": "FixedRate",
- "trade_date": "Start Date",
- "effective_date": "EffectiveDate",
- "maturity": "Maturity Date",
- "security_id": "Underlying (ISIN / CUSP / RED CODES)",
- "security_desc": "Underlying Desc",
- "mtm": "MTM Valuation",
- },
- )
- case "ir_swaption" | "cdx_swaption":
- rename_keys(
- obj,
- {
- "dealid": "Unique Deal ID",
- "cp_code": "Counterparty",
- "currency": "DealCurrencyA",
- "active_notional": "NotionalA",
- "fixed_rate": "FixedRate",
- "strike": "Strike",
- "effectivedate": "Effective Date",
- "trade_date": "Start Date",
- "maturity": "Maturity Date",
- "expiration_date": "Underlying Maturity",
- "security_id": "Underlying (ISIN / CUSP / RED CODES)",
- "security_desc": "Underlying Desc",
- "nav": "MTM Valuation",
- },
- )
- obj["TransactionIndicator (Buy/Sell)"] = "B" if obj["buysell"] else "S"
- obj["PutCall Indicator (Call/Put)"] = (
- "P" if obj["option_type"] == "PAYER" else "C"
- )
- obj["Exercise Type"] = "European"
- case "cdx":
- obj["Counterparty"] = "BOA_FC"
- obj["Unique Deal ID"] = obj["security_id"]
- obj["TransactionIndicator (Buy/Sell)"] = "B" if obj["notional"] > 0 else "S"
- obj["DealCurrencyA"] = "EUR" if obj["index"] in ("EU", "XO") else "USD"
- obj["NotionalA"] = abs(obj["notional"]) * obj["factor"]
- obj["Start Date"] = date
- obj["MTM Valuation"] = obj["clean_nav"] + obj["accrued"]
- obj["Clearing House Name"] = "ICE"
- obj["FixedRate"] = obj["coupon"] * 100
- rename_keys(
- obj,
- {
- "effectivedate": "Effective Date",
- "maturity": "Maturity Date",
- "security_id": "Underlying (ISIN / CUSP / RED CODES)",
- "security_desc": "Underlying Desc",
- },
- )
- return obj
-
-
-def main(conn, date, upload):
- trades = []
- with conn.cursor() as c:
- for asset_class in (
- "bond",
- "future",
- "tranche",
- "cdx_swaption",
- "ir_swaption",
- "cdx",
- ):
- c.execute(_sql_query[asset_class], (date,))
- for row in c:
- obj = row._asdict()
- obj = _base_attrs(asset_class, date, obj)
- trades.append(obj)
- process_upload(trades, upload=True)
-
-
-if __name__ == "__main__":
- import argparse
- from serenitas.utils.db import dbconn
-
- parser = argparse.ArgumentParser(
- description="Generate position files for ISOSEL Street"
- )
- parser.add_argument(
- "date",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=prev_business_day(datetime.date.today()),
- )
- parser.add_argument(
- "--no-upload",
- "-n",
- action="store_true",
- default=False,
- help="uploads to citco and innocap",
- )
- args = parser.parse_args()
- conn = dbconn("dawndb")
- main(conn, args.date, not args.no_upload)
diff --git a/python/report_ops/__main__.py b/python/report_ops/__main__.py
index 68708109..194dd536 100644
--- a/python/report_ops/__main__.py
+++ b/python/report_ops/__main__.py
@@ -1,9 +1,19 @@
from serenitas.analytics.dates import prev_business_day
-from serenitas.utils.db import dbconn
+from serenitas.utils.exchange import ExchangeMessage
import logging
import argparse
import datetime
-from .sma import IsoselSMA, BowdstSMA
+from .sma import (
+ IsoselSMA,
+ BowdstSMA,
+ PositionReport,
+ BondPosition,
+ FuturePosition,
+ TranchePosition,
+ CDXPosition,
+ IRSwaptionPosition,
+ CDXSwaptionPosition,
+)
from .cash import NTCashReport, UMBCashReport, BNYCashReport
from .admin import AccruedReport, AllReport
from .wires import BowdstWire, NTWire
@@ -92,9 +102,12 @@ if args.wire_reports:
logger.info(e)
if args.send_to_custodians:
- conn = dbconn("dawndb")
- for account in ("UMB", "BBH"):
+ em = ExchangeMessage()
+ for account in (
+ "BBH",
+ "UMB",
+ ):
try:
- upload_to_custodian(account, args.date, conn, not args.no_upload)
+ upload_to_custodian(account, args.date, not args.no_upload, em)
except ValueError as e:
logger.info(e)
diff --git a/python/report_ops/custodians.py b/python/report_ops/custodians.py
index a82f2b15..e225bb4d 100644
--- a/python/report_ops/custodians.py
+++ b/python/report_ops/custodians.py
@@ -1,49 +1,72 @@
-from serenitas.utils.exchange import ExchangeMessage
+from serenitas.utils.exchange import ExchangeMessage, FileAttachment
from serenitas.utils.env import DAILY_DIR
import warnings
import datetime
-from .misc import get_dir
+from .misc import get_dir, _recipients, _cc_recipients
import gpg
from serenitas.ops.trade_dataclasses import BondDeal
from serenitas.ops.funds import Service
from typing import ClassVar
from dataclasses import dataclass
-_sql = (
- "INSERT INTO bond_csv_upload (allocationid, identifier, principal, interest) SELECT id, identifier, principal_payment, "
- "accrued_payment FROM bond_trades WHERE trade_date=%s AND account=%s AND tradeid IS NOT NULL ON CONFLICT DO NOTHING RETURNING allocationid;"
-)
-_bond_query = "SELECT * FROM bond_trades WHERE id in %s;"
-
-
-def upload_to_custodian(account, trade_date, conn, upload):
- _fund = {"BBH": "BRINKER", "UMB": "UMB"}
- custodian = Service[_fund[account]]
- with conn.cursor() as c:
+def upload_to_custodian(account, trade_date, upload, em):
+ _service = {"BBH": "BRINKER", "UMB": "UMB"}
+ custodian = Service[_service[account]]
+ conn = BondDeal._conn
+ with conn.cursor() as c, conn.cursor() as d:
c.execute(
- _sql,
+ "SELECT * FROM bond_trades WHERE trade_date=%s AND account=%s",
(
trade_date,
- "BAC" if account == "UMB" else account,
+ account,
),
)
- tids = tuple(row.allocationid for row in c)
- if not tids:
- return
- c.execute(_bond_query, (tids,))
for row in c:
- trade = BondDeal.from_dict(**row._asdict(), scaled=True)
- match account:
- case "BBH":
- custodian.staging_queue.append(trade.to_bbh("NEW"))
- case "UMB":
- custodian.staging_queue.append(trade.to_umb("NEW"))
+ d.execute(
+ "SELECT identifier, principal_payment, accrued_payment FROM bond_csv_upload WHERE allocationid=%s FOR UPDATE",
+ (row.id,),
+ )
+ if old_row := d.fetchone():
+ if any(
+ [
+ old_row.identifier != row.identifier,
+ abs(old_row.principal_payment - row.principal_payment) > 1e-2,
+                    abs(old_row.accrued_payment - row.accrued_payment) > 1e-2,
+ ]
+ ):
+ old_trade = BondDeal.from_dict(
+ row._asdict() | old_row._asdict(), scaled=True
+ )
+ custodian.push_trade(old_trade, "CANCEL")
+ d.execute(
+ "UPDATE bond_csv_upload SET identifier=%s, principal_payment=%s, accrued_payment=%s WHERE allocationid=%s",
+ (
+ row.identifier,
+ row.principal_payment,
+ row.accrued_payment,
+ row.id,
+ ),
+ )
+ else:
+ continue
+ trade = BondDeal.from_dict(row._asdict(), scaled=True)
+ custodian.push_trade(trade, "NEW")
+ if not custodian.staging_queue:
+ return
buf, dest = custodian.build_buffer("bond")
custodian.staging_queue.clear()
conn.commit()
if upload:
custodian.upload(buf, dest.name, confirm=account != "UMB")
+ em.send_email(
+ f"{account}: Bond Positions Uploaded for {trade_date}",
+ "Hi, \nWe've just uploaded the positions via SFTP. File receipt attached to this email",
+ _recipients.get(account, _cc_recipients[custodian.name]),
+ cc_recipients=_cc_recipients[custodian.name],
+ reply_to=_cc_recipients[custodian.name],
+ attach=(FileAttachment(name=dest.name, content=buf),),
+ )
@dataclass
@@ -144,7 +167,3 @@ class BNY(Custodian, account="BONY2"):
p.parent.mkdir(parents=True, exist_ok=True)
if not p.exists():
p.write_bytes(attach.content)
-
-
-class BBH(Custodian, account="BBH"):
- pass
diff --git a/python/report_ops/headers.py b/python/report_ops/headers.py
new file mode 100644
index 00000000..b563e1d6
--- /dev/null
+++ b/python/report_ops/headers.py
@@ -0,0 +1,36 @@
+POSITION_HEADERS = [
+ "Client Name",
+ "Fund Name",
+ "Counterparty",
+ "AccountNumber",
+ "COB Date",
+ "SecurityDescription",
+ "Prime Broker",
+ "Product Type",
+ "Unique Deal ID",
+ "TransactionIndicator (Buy/Sell)",
+ "PutCall Indicator (Call/Put)",
+ "CapFloorIndicator",
+ "CurrencyPair",
+ "DealCurrencyA",
+ "DealCurrencyB",
+ "NotionalA",
+ "NotionalB",
+ "OriginalPrice",
+ "Strike",
+ "FixedRate",
+ "Quantity",
+ "Start Date",
+ "Effective Date",
+ "Maturity Date",
+ "Underlying Maturity",
+ "RecPayFixed",
+ "Underlying (ISIN / CUSP / RED CODES)",
+ "Underlying Desc",
+ "Exercise Type",
+ "MTM Currency",
+ "MTM Valuation",
+ "MarketPrice",
+ "COB Date",
+ "Clearing House Name",
+]
diff --git a/python/report_ops/misc.py b/python/report_ops/misc.py
index 6d435efe..da7d61ac 100644
--- a/python/report_ops/misc.py
+++ b/python/report_ops/misc.py
@@ -12,13 +12,20 @@ _recipients = {
"mbisoye@sscinc.com",
"hedgemark.lmcg.ops@sscinc.com",
"hm-operations@bnymellon.com",
+ "Hedgemark.OTC@sscinc.com",
),
"SERCGMAST": (
"SERENITAS.FA@sscinc.com",
"SERENITAS.ops@sscinc.com",
),
"BAML_FCM": ("footc_margin_csr_amrs@bofa.com",),
+ "GS_FCM": (
+ "Susan.Olesky@ny.email.gs.com",
+ "Divyanshi.Girotra@gs.com",
+ "gs-margin-calls-dcs@ny.email.gs.com",
+ ),
"NYOPS": ("nyops@lmcg.com",),
+ "UMB": ("lmcgcustody@umb.com",),
}
_sma_recipients = {
@@ -38,10 +45,13 @@ _cc_recipients = {
"ISOSEL": ("selene-ops@lmcg.com",),
"BOWDST": ("bowdoin-ops@lmcg.com",),
"SERCGMAST": ("nyops@lmcg.com",),
+ "BRINKER": ("nyops@lmcg.com",),
}
-def get_dir(workdate: datetime.date, archived=True) -> pathlib.Path:
+def get_dir(
+    workdate: datetime.date | None = None, archived=True
+) -> pathlib.Path:
+    # NOTE(review): a datetime.date.today() default is evaluated once at import
+    # time and goes stale in long-running processes; resolve it per call instead.
+    if workdate is None:
+        workdate = datetime.date.today()
p = DAILY_DIR / str(workdate) / "Reports"
if not p.exists() and archived:
p = (
diff --git a/python/report_ops/sma.py b/python/report_ops/sma.py
index 9a3bf77a..83d400c5 100644
--- a/python/report_ops/sma.py
+++ b/python/report_ops/sma.py
@@ -1,12 +1,39 @@
import datetime
-from dataclasses import dataclass
+from dataclasses import dataclass, field
from serenitas.utils.db import dbconn
from serenitas.utils.exchange import ExchangeMessage
-from .misc import _sma_recipients, _cc_recipients
+from serenitas.utils.misc import rename_keys
+from serenitas.ops.trade_dataclasses import Deal
+from .misc import _sma_recipients, _cc_recipients, get_dir
from exchangelib import FileAttachment
import pandas as pd
from io import StringIO
from typing import ClassVar
+from .headers import POSITION_HEADERS
+from io import StringIO
+import csv
+from serenitas.utils.env import DAILY_DIR
+
+
+def build_position_file(
+ cob,
+ fund,
+ asset_classes: list = [
+ "bond",
+ "future",
+ "tranche",
+ "ir_swaption",
+ "cdx_swaption",
+ "irs",
+ "cdx",
+ ],
+):
+ for asset_class in asset_classes:
+ for position in PositionReport[asset_class].gen_positions(cob, fund):
+ PositionReport.staging_queue.append(position.to_position())
+ buf, dest = PositionReport.build_buffer(cob, fund)
+ PositionReport.staging_queue.clear()
+ return buf, dest
@dataclass
@@ -71,3 +98,250 @@ class IsoselSMA(SMA, fund="ISOSEL"):
class BowdstSMA(SMA, fund="BOWDST"):
pass
+
+
+_sql_query = {
+ "bond": "SELECT * FROM risk_positions(%s, null, %s) ",
+ "future": (
+ "WITH tmp AS (SELECT bbg_ticker, fund, security_desc, currency, maturity, account_code, dealid, buysell, sum(quantity * (2*buysell::int-1)) OVER (PARTITION BY bbg_ticker, fund, security_desc, currency, maturity) notional FROM futures "
+ "WHERE trade_date <= %s AND fund=%s) "
+ "SELECT bbg_ticker, notional, code AS cp_code, cash_account, security_desc, currency, maturity, account_code, dealid, buysell FROM tmp LEFT JOIN accounts USING (fund) WHERE tmp.notional != 0 AND account_type='Future';"
+ ),
+ "tranche": "SELECT trb.trade_id, trb.serenitas_clean_nav + trb.serenitas_accrued as mtm, trb.notional * trb.tranche_factor as active_notional, cds.* FROM tranche_risk_isosel trb left join cds on trade_id=id WHERE date=%s",
+ "cdx_swaption": "SELECT abs(spr.notional) AS active_notional, spr.serenitas_nav, swaptions.*, index_version_markit.annexdate FROM list_swaption_positions_and_risks(%s, %s) spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
+ "ir_swaption": "SELECT abs(spr.notional) AS active_notional, spr.nav as serenitas_nav, swaptions.*, index_version_markit.effectivedate FROM list_ir_swaption_positions(%s, %s) spr LEFT JOIN swaptions ON deal_id=dealid LEFT JOIN index_version_markit ON swaptions.security_id=redindexcode;",
+ "cdx": "SELECT cds.*, ivm.effectivedate FROM list_cds_marks(%s, null, %s) cds LEFT JOIN index_version_markit ivm ON security_id=redindexcode;",
+ "irs": "SELECT isr.pv, irs.*, accounts2.name FROM ir_swap_risk isr LEFT JOIN irs ON id=swp_id LEFT JOIN accounts2 USING (cash_account) WHERE date=%s AND irs.fund=%s;",
+}
+
+_fund_custodian = {"BOWDST": "BONY2", "ISOSEL": "NT"}
+_fund_client = {"BOWDST": "Hedgemark", "ISOSEL": "Innocap"}
+_fund_fcm = {"BOWDST": "GS_FCM", "ISOSEL": "BOA_FC"}
+
+
+def get_path(cob, fund):
+ match fund:
+ case "ISOSEL":
+ filepath_pattern = "Innocap_{fund}_positions_{cob:%Y-%m-%d}.csv"
+ case _:
+ filepath_pattern = "{fund}_positions_{cob:%Y%m%d}.csv"
+ return get_dir() / filepath_pattern.format(fund=fund, cob=cob)
+
+
+@dataclass
+class PositionReport(Deal, deal_type=None, table_name=None):
+ client_name: str = field(metadata={"position": "Client Name"})
+ fund: str = field(metadata={"position": "Fund Name"})
+ cp_code: str = field(metadata={"position": "Counterparty"})
+ dealid: str = field(metadata={"position": "Unique Deal ID"})
+ buysell: bool
+ currency: str = field(metadata={"position": "DealCurrencyA"})
+ notional: float = field(metadata={"position": "NotionalA"})
+ cob: datetime.date = field(metadata={"position": "COB Date"})
+ identifier: str = field(
+ metadata={"position": "Underlying (ISIN / CUSP / RED CODES)"}
+ )
+ start_date: datetime.date = field(default=None, metadata={"position": "Start Date"})
+ effective_date: datetime.date = field(
+ default=None, metadata={"position": "Effective Date"}
+ )
+ maturity: datetime.date = field(
+ default=None, metadata={"position": "Maturity Date"}
+ )
+ description: str = field(default=None, metadata={"position": "Underlying Desc"})
+ local_market_value: str = field(
+ default=None, metadata={"position": "Local Market Value"}
+ )
+ mtm_currency: str = field(default=None, metadata={"position": "MTM Currency"})
+ mtm_valuation: float = field(default=None, metadata={"position": "MTM Valuation"})
+ fixed_rate: float = field(default=None, metadata={"position": "FixedRate"})
+ putcall: bool = None
+ strike: float = field(default=None, metadata={"position": "Strike"})
+ underlying_maturity: datetime.date = field(
+ default=None, metadata={"position": "Underlying Maturity"}
+ )
+ exercise_type: str = field(default=None, metadata={"position": "Exercise Type"})
+ clearing_house: str = field(
+ default=None, metadata={"position": "Clearing House Name"}
+ )
+ account: str = field(default=None, metadata={"position": "AccountNumber"})
+ primebroker: str = field(default=None, metadata={"position": "Prime Broker"})
+ price: float = field(default=None, metadata={"position": "MarketPrice"})
+ staging_queue: ClassVar = []
+ asset_class: ClassVar[str] = field(metadata={"position": "Product Type"})
+ _query: ClassVar[str]
+
+ def __init_subclass__(cls, asset_class, **kwargs):
+ cls.asset_class = asset_class
+ cls._query = _sql_query[asset_class]
+ cls._registry[asset_class] = cls
+
+ @classmethod
+ def gen_positions(cls, cob, fund):
+ with cls._conn.cursor() as c:
+ params = (cob, fund) if cls not in (TranchePosition,) else (cob,)
+ c.execute(cls._query, params)
+ for row in c:
+ yield cls.from_query(row._asdict(), cob, fund)
+
+ @classmethod
+ def build_buffer(cls, cob, fund):
+ buf = StringIO()
+ csvwriter = csv.writer(buf)
+ csvwriter.writerow(POSITION_HEADERS)
+ csvwriter.writerows(
+ [[obj.get(h) for h in POSITION_HEADERS] for obj in cls.staging_queue]
+ )
+ buf = buf.getvalue().encode()
+ dest = get_path(cob, fund)
+ dest.parent.mkdir(exist_ok=True)
+ dest.write_bytes(buf)
+ return buf, dest
+
+    @classmethod
+    def from_query(cls, d, cob, fund):
+ d["client_name"] = _fund_client[fund]
+ d["fund"] = fund
+ d["cob"] = cob
+ return d
+
+ def to_position(self):
+ obj = self.serialize("position")
+ obj["Product Type"] = self.asset_class
+ match self.asset_class:
+ case "irs":
+ obj["TransactionIndicator (Buy/Sell)"] = (
+ "Pay Fixed" if self.buysell else "Receive Fixed"
+ )
+ case _:
+ obj["TransactionIndicator (Buy/Sell)"] = (
+ "Buy" if self.buysell else "Sell"
+ )
+ return obj
+
+
+class BondPosition(PositionReport, asset_class="bond"):
+ @classmethod
+ def from_query(cls, d: dict, cob, fund):
+ d = super().from_query(d, cob, fund)
+ rename_keys(
+ d,
+ {
+ "usd_market_value": "mtm_valuation",
+ },
+ )
+ for key in ("account", "primebroker", "cp_code"):
+ d[key] = _fund_custodian[fund]
+ d["dealid"] = "Aggregated"
+ d["buysell"] = True
+ d["currency"] = "USD"
+ return cls.from_dict(**d)
+
+
+class FuturePosition(PositionReport, asset_class="future"):
+ @classmethod
+ def from_query(cls, d: dict, cob, fund):
+ d = super().from_query(d, cob, fund)
+ rename_keys(
+ d,
+ {
+ "bbg_ticker": "identifier",
+ "cash_account": "account",
+ "security_desc": "description",
+ "account_code": "primebroker",
+ },
+ )
+ return cls.from_dict(**d)
+
+
+class TranchePosition(PositionReport, asset_class="tranche"):
+ @classmethod
+ def from_query(cls, d: dict, cob, fund):
+ d = super().from_query(d, cob, fund)
+ rename_keys(
+ d,
+ {
+ "active_notional": "notional",
+ "trade_date": "start_date",
+ "security_desc": "description",
+ "mtm": "mtm_valuation",
+ "security_id": "identifier",
+ },
+ )
+ d["primebroker"] = "Bilateral"
+ d["buysell"] = d["protection"] == "Buyer"
+ return cls.from_dict(**d)
+
+
+class SwaptionPosition:
+ @classmethod
+ def from_query(cls, d: dict, cob, fund):
+ d = super().from_query(d, cob, fund)
+ rename_keys(
+ d,
+ {
+ "active_notional": "notional",
+ "trade_date": "start_date",
+ "effectivedate": "effective_date",
+ "nav": "MTM Valuation",
+ "expiration_date": "Underlying Maturity",
+ "security_id": "identifier",
+ "security_desc": "description",
+ },
+ )
+ d["primebroker"] = "Bilateral"
+ return cls.from_dict(**d)
+
+
+class IRSwaptionPosition(SwaptionPosition, PositionReport, asset_class="ir_swaption"):
+ pass
+
+
+class CDXSwaptionPosition(SwaptionPosition, PositionReport, asset_class="cdx_swaption"):
+ pass
+
+
+class CDXPosition(PositionReport, asset_class="cdx"):
+ @classmethod
+ def from_query(cls, d: dict, cob, fund):
+ d = super().from_query(d, cob, fund)
+ rename_keys(
+ d,
+ {
+ "effectivedate": "effective_date",
+ "security_desc": "description",
+ "security_id": "identifier",
+ "name": "primebroker",
+ },
+ )
+ d["FixedRate"] = d["coupon"] * 100
+ d["buysell"] = d["notional"] > 0
+ d["notional"] = abs(d["notional"]) * d["factor"]
+ d["mtm"] = d["clean_nav"] + d["accrued"]
+ d["cp_code"] = _fund_fcm[fund]
+ d["primebroker"] = _fund_fcm[fund]
+ d["currency"] = "EUR" if d["index"] in ("EU", "XO") else "USD"
+ d["clearing_house"] = "ICE"
+ d["dealid"] = "Aggregated"
+ return cls.from_dict(**d)
+
+
+class IRSPosition(PositionReport, asset_class="irs"):
+ @classmethod
+ def from_query(cls, d: dict, cob, fund):
+ d = super().from_query(d, cob, fund)
+ rename_keys(
+ d,
+ {
+ "trade_date": "start_date",
+ "effectivedate": "effective_date",
+ "pv": "mtm_valuation",
+ "maturity_date": "maturity",
+ "float_index": "identifier",
+ "swap_type": "description",
+ "payreceive": "buysell",
+ "cash_account": "account",
+ },
+ )
+ d["clearing_house"] = "ICE"
+ d["primebroker"] = _fund_fcm[fund]
+ return cls.from_dict(**d)
diff --git a/python/risk/__main__.py b/python/risk/__main__.py
index 3ba9f077..57534d02 100644
--- a/python/risk/__main__.py
+++ b/python/risk/__main__.py
@@ -3,12 +3,15 @@ import os
import serenitas.analytics
import argparse
import datetime
+from quantlib.time.api import Date
+from quantlib.time.calendars.united_states import UnitedStates, Market
from serenitas.utils.db import dbengine
from serenitas.utils.db2 import dbconn
from serenitas.utils.pool import dawn_pool
from .bonds import subprime_risk, clo_risk, crt_risk, insert_subprime_risk
from serenitas.analytics.base import Trade
from serenitas.analytics.dates import prev_business_day
+from serenitas.analytics.utils import run_local
from .indices import insert_curve_risk, insert_index_risk
from .ir_swap import insert_ir_swap_portfolio
from .ir_swaption import insert_ir_swaption_portfolio
@@ -41,6 +44,8 @@ mysql_engine = dbengine("rmbs_model")
mysqlcrt_engine = dbengine("crt")
funds = ("SERCGMAST", "BOWDST", "BRINKER", "ISOSEL")
+
+us_cal = UnitedStates(Market.FederalReserve)
with dawn_pool.connection() as conn:
for fund in funds:
insert_curve_risk(
@@ -49,13 +54,15 @@ with dawn_pool.connection() as conn:
fund,
("SER_IGCURVE", "SER_ITRXCURVE", "XCURVE", "SER_HYCURVE"),
)
- ir_swaption_portf = IRSwaption.get_portfolio(workdate, fund=fund)
- insert_ir_swaption_portfolio(ir_swaption_portf, conn)
- ir_swap_portf = SofrSwap.get_portfolio(workdate, fund=fund)
- insert_ir_swap_portfolio(ir_swap_portf, conn)
+ if us_cal.is_business_day(Date.from_datetime(workdate)):
+ ir_swaption_portf = IRSwaption.get_portfolio(workdate, fund=fund)
+ insert_ir_swaption_portfolio(ir_swaption_portf, conn)
+ ir_swap_portf = SofrSwap.get_portfolio(workdate, fund=fund)
+ insert_ir_swap_portfolio(ir_swap_portf, conn)
insert_index_risk(workdate, conn, fund)
portf = get_tranche_portfolio(workdate, conn, funds=funds)
- insert_tranche_pnl_explain(portf, conn)
+ with run_local():
+ insert_tranche_pnl_explain(portf, conn)
insert_tranche_risk(portf, conn)
portf = get_swaption_portfolio(workdate, conn, source_list=["MS"])
insert_swaption_portfolio(portf, conn)
diff --git a/python/risk/portfolio.py b/python/risk/portfolio.py
index b0a69056..88e10f26 100644
--- a/python/risk/portfolio.py
+++ b/python/risk/portfolio.py
@@ -38,7 +38,7 @@ def build_portfolio(position_date, value_date=None, fund="SERCGMAST"):
Trade.init_ontr(value_date)
with dawn_pool.connection() as conn:
- portf = get_tranche_portfolio(position_date, conn, False, fund)
+ portf = get_tranche_portfolio(position_date, conn, False, [fund])
swaption_portf = get_swaption_portfolio(position_date, conn, fund=fund)
swaption_portf.trade_ids = [tid[:-1] for tid in swaption_portf.trade_ids]
portf += swaption_portf
diff --git a/python/risk/tranches.py b/python/risk/tranches.py
index 134baa24..01c08951 100644
--- a/python/risk/tranches.py
+++ b/python/risk/tranches.py
@@ -1,3 +1,4 @@
+from pyisda.date import cds_accrued
from serenitas.analytics.api import Portfolio, DualCorrTranche
from serenitas.analytics.dates import prev_business_day
from serenitas.analytics.utils import get_fx
@@ -55,37 +56,63 @@ def insert_tranche_pnl_explain(portf, conn):
with conn.cursor(binary=True) as c:
c.execute("SELECT * FROM tranche_risk WHERE date=%s", (prev_day,))
prev_day_risk = {rec.tranche_id: rec for rec in c}
- c.execute("SELECT id, upfront FROM cds WHERE trade_date=%s", (value_date,))
+ c.execute(
+ "SELECT cds.id, cds.upfront, cds_delta.upfront AS delta_upfront, "
+ "cds_delta.notional * (CASE WHEN cds_delta.protection='Buyer' THEN -1.0 ELSE 1.0 END) AS notional, "
+ "cds.currency::text FROM cds "
+ " LEFT JOIN cds AS cds_delta ON cds_delta.id=cds.delta_id "
+ "WHERE cds.trade_date=%s",
+ (value_date,),
+ )
daily_trades = {rec.id: rec for rec in c}
c.execute(
- "SELECT dealid, termination_amount, termination_fee "
- "FROM terminations WHERE deal_type='CDS' AND termination_date=%s",
+ "SELECT terminations.dealid, termination_amount, termination_fee, terminations.currency::text, "
+ "cds.notional * delta_alloc * (CASE WHEN cds.protection='Buyer' THEN -1.0 ELSE 1.0 END) AS notional, "
+ "cds.upfront * delta_alloc AS delta_upfront "
+ "FROM terminations LEFT JOIN cds ON cds.id=terminations.delta_id "
+ "WHERE deal_type='CDS' AND termination_date=%s",
(value_date,),
)
terminations = {int(rec.dealid.removeprefix("SCCDS")): rec for rec in c}
- today_trades = {trade_id: trade for (strat, trade_id), trade in portf.items()}
- all_ids = today_trades.keys() | prev_day_risk.keys()
+ current_trades = {trade_id: trade for (strat, trade_id), trade in portf.items()}
+ all_ids = current_trades.keys() | prev_day_risk.keys()
to_insert = []
for trade_id in all_ids:
pnl = 0
+ fx_pnl = 0
if trade_id in daily_trades:
- pnl += daily_trades[trade_id].upfront * get_fx(
- value_date, trade.currency
- )
+ trade = daily_trades[trade_id]
+ pnl = trade.upfront * get_fx(value_date, trade.currency)
if trade_id in terminations:
- pnl += terminations[trade_id].termination_fee * get_fx(
- value_date, trade.currency
+ term = terminations[trade_id]
+ pnl += term.termination_fee * get_fx(value_date, term.currency)
+ fx_pnl += term.termination_fee * (
+ get_fx(value_date, term.currency) - get_fx(prev_day, term.currency)
)
- if trade_id not in today_trades:
+ if trade_id not in current_trades:
previous_risk = prev_day_risk[trade_id]
pnl = pnl - (previous_risk.clean_nav + previous_risk.accrued)
+ dirty_index_pv = (
+ 1
+ - previous_risk.index_refprice * 0.01
+ - cds_accrued(prev_day, previous_risk.running * 1e-4)
+ )
+            # `term` is only bound when trade_id is in terminations; a trade can
+            # also drop out without a termination record, so guard the lookup.
+            if (
+                trade_id in terminations and term.delta_upfront
+            ):  # if None means either no delta or we didn't populate
+                delta_pnl = (
+                    term.delta_upfront
+                    - term.notional * dirty_index_pv * previous_risk.index_factor
+                )
+            else:
+                delta_pnl = 0.0
else:
- trade = today_trades[trade_id]
+ trade = current_trades[trade_id]
if trade_id in prev_day_risk:
previous_risk = prev_day_risk[trade_id]
- pnl = trade.pv * get_fx(value_date, trade.currency) - (
+ pnl += trade.pv * get_fx(value_date, trade.currency) - (
previous_risk.clean_nav + previous_risk.accrued
)
@@ -99,21 +126,31 @@ def insert_tranche_pnl_explain(portf, conn):
* previous_risk.notional
* (
float(trade._index.pv())
+ * get_fx(value_date, trade.currency)
- (1 - previous_risk.index_refprice * 0.01)
+ * get_fx(prev_day, trade.currency)
)
- * get_fx(prev_day, trade.currency)
)
else:
- fx_pnl = trade.pv * (
- get_fx(value_date, trade.currency)
- - get_fx(prev_day, trade.currency)
- )
- delta_pnl = 0.0
- to_insert.append((value_date, trade_id, pnl, fx_pnl, delta_pnl))
+ fx_pnl = 0.0
+ day_trade = daily_trades[trade_id]
+ dirty_index_pv = float(trade._index.pv() - trade._index.accrued())
+ if day_trade.notional:
+ delta_pnl = (
+ day_trade.notional * dirty_index_pv * trade._index.factor
+ - day_trade.delta_upfront
+ )
+ else: # if None means either no delta or we didn't populate
+ delta_pnl = 0
+ pnl += trade.pv * get_fx(value_date, trade.currency)
+ unexplained = pnl - delta_pnl - fx_pnl
+ to_insert.append(
+ (value_date, trade_id, pnl, fx_pnl, delta_pnl, unexplained)
+ )
c.executemany(
- "INSERT INTO tranche_pnl_explain(date, tranche_id, pnl, fx_pnl, delta_pnl) "
- "VALUES (%s, %s, %s, %s, %s)",
+ "INSERT INTO tranche_pnl_explain(date, tranche_id, pnl, fx_pnl, delta_pnl, unexplained) "
+ "VALUES (%s, %s, %s, %s, %s, %s)",
to_insert,
)
conn.commit()
@@ -151,8 +188,6 @@ def insert_tranche_risk(portf, conn):
" ON CONFLICT (date, tranche_id) DO UPDATE "
f"SET {update_str}"
)
- value_date = portf.value_date
- prev_day = prev_business_day(value_date)
with conn.cursor(binary=True) as c:
for (strat, trade_id), trade in portf.items():
logger.info(f"marking tranche {trade_id} in {strat}")
diff --git a/python/strat_cash_realloc.py b/python/strat_cash_realloc.py
index f8889f54..0bfafce0 100644
--- a/python/strat_cash_realloc.py
+++ b/python/strat_cash_realloc.py
@@ -18,7 +18,7 @@ def generate_csv(date, fund="BOWDST"):
if c.fetchone():
return
c.execute(
- "SELECT * FROM list_orphaned_cash(%s, %s) ",
+ "SELECT * FROM list_orphaned_cash(%s, %s) WHERE abs(amount) > 1",
(
date,
fund,
diff --git a/python/task_server/__main__.py b/python/task_server/__main__.py
index a5ccf8a9..2fb1f1e6 100644
--- a/python/task_server/__main__.py
+++ b/python/task_server/__main__.py
@@ -33,7 +33,7 @@ if args.download:
for fund in ("SERCGMAST", "BOWDST"):
download_data(engine, args.date, fund)
elif args.upload:
- time = datetime.datetime.now().time()
- date = datetime.datetime.combine(args.date, time)
for fund in ("SERCGMAST", "BOWDST"):
+ time = datetime.datetime.now().time()
+ date = datetime.datetime.combine(args.date, time)
upload_data(engine, date, fund)
diff --git a/python/task_server/globeop.py b/python/task_server/globeop.py
index 560dcee3..0fa225dc 100644
--- a/python/task_server/globeop.py
+++ b/python/task_server/globeop.py
@@ -164,6 +164,12 @@ def insert_todb(engine, workdate: datetime.date, fund="SERCGMAST"):
if not report_file.exists():
continue
df = fun(report_file)
+ if workdate >= datetime.date(2022, 11, 25):
+ match report:
+ case "Valuation" | "Pnl":
+ df.loc[~df.invid.isin(["USD", "EUR"]), "custacctname"] = df.loc[
+ ~df.invid.isin(["USD", "EUR"])
+ ].custacctname.str.replace("V0NSCLMAMB", "159260.1")
if report == "Valuation":
period_end_date = pd.Timestamp(df.periodenddate[0])
sql_str = (
@@ -199,14 +205,14 @@ def upload_bond_marks(engine, workdate: datetime.datetime, fund):
),
)
df.rename(columns={"identifier": "IDENTIFIER", "price": "Price"}, inplace=True)
- fullpath = DAILY_DIR / str(d) / f"securitiesNpv{workdate:%Y%m%d_%H%M%S}.csv"
+ fullpath = DAILY_DIR / str(d) / f"securitiesNpv{workdate:%Y%m%d_%H%M%S}_{fund}.csv"
df.to_csv(fullpath, index=False)
match fund:
case "SERCGMAST":
server = FtpClient.from_creds("globeop", folder="incoming")
case "BOWDST":
server = SftpClient.from_creds("hm_globeop", folder="incoming/gopricing")
- server.put(fullpath)
+ server.put(fullpath, f"securitiesNpv{workdate:%Y%m%d_%H%M%S}.csv")
logger.info("upload bond marks done")
@@ -222,14 +228,14 @@ JOIN cds USING (security_id)""",
fund,
),
)
- fullpath = DAILY_DIR / str(d) / f"otcNpv{workdate:%Y%m%d}.csv"
+ fullpath = DAILY_DIR / str(d) / f"otcNpv{workdate:%Y%m%d}_{fund}.csv"
df.to_csv(fullpath, index=False)
match fund:
case "SERCGMAST":
server = FtpClient.from_creds("globeop", folder="incoming")
case "BOWDST":
server = SftpClient.from_creds("hm_globeop", folder="incoming/gopricing")
- server.put(fullpath)
+ server.put(fullpath, f"otcNpv{workdate:%Y%m%d}.csv")
logger.info("upload cds marks done")
diff --git a/python/upload_bbh_bonds.py b/python/upload_bbh_bonds.py
deleted file mode 100644
index faeac491..00000000
--- a/python/upload_bbh_bonds.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import argparse
-import datetime
-from serenitas.ops.trade_dataclasses import BondDeal
-from serenitas.ops.funds import UMB
-from serenitas.ops.custodians import upload_to_bbh
-
-
-def main(upload, trade_date, fund="BRINKER"):
- upload_to_bbh(upload=upload, trade_date=trade_date, fund=fund)
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "trade_date",
- type=datetime.date.fromisoformat,
- default=datetime.date.today(),
- nargs="?",
- )
- parser.add_argument(
- "-n",
- "--no-upload",
- action="store_true",
- help="do not upload just drop in the daily directory",
- default=False,
- )
- args = parser.parse_args()
- main(not args.no_upload, args.trade_date)
diff --git a/python/upload_bbh_trades.py b/python/upload_bbh_trades.py
deleted file mode 100644
index 519623c1..00000000
--- a/python/upload_bbh_trades.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import csv
-from serenitas.utils.db import dbconn, dawn_engine
-import datetime
-from serenitas.utils.misc import rename_keys
-import pandas as pd
-from sqlalchemy.exc import IntegrityError
-from io import StringIO
-from serenitas.utils.env import DAILY_DIR
-from serenitas.utils.remote import SftpClient
-from csv_headers.bond_upload import BBH_BONDS as headers
-
-
-def _include_headers_only(obj, headers):
- new_obj = {}
- for header in headers:
- new_obj[header] = obj.get(header, None)
- new_obj["tradeid"] = obj.get("tradeid")
- return new_obj
-
-
-def _serialize(obj):
- rename_keys(
- obj,
- {
- "dealid": "Client Reference Number",
- "identifier": "Security ID",
- "accrued_payment": "Interest Amount",
- "dtc_number": "Trading Broker Type/ID",
- "principal_payment": "Principal Amount",
- "faceamount": "Unit / Original Face Amount",
- "current_face": "Current Face/Amortize Value",
- "price": "Unit Price Amount",
- "net_amount": "Net Amount",
- },
- )
- trade_details = {
- "Trade Date": obj["trade_date"].strftime("%m/%d/%Y"),
- "Settlement Date": obj["settle_date"].strftime("%m/%d/%Y"),
- "Place of Settlement/Country": "DTCYUS33",
- "Transaction Type": "RVP" if obj["buysell"] else "DVP",
- "Function of Instruction": "NEWM",
- "Account Number": "4023461",
- "Currency": "USD",
- "Clearing Broker ID / Type": obj["Trading Broker Type/ID"],
- "Other Fees Amount": 0,
- "Commission Amount": 0,
- "SEC Fees Amount": 0,
- }
- obj.update(trade_details)
- return _include_headers_only(obj, headers)
-
-
-def process_upload(obj):
- buf = StringIO()
- csvwriter = csv.writer(buf)
- csvwriter.writerow(headers)
- csvwriter.writerow([obj.get(header, None) for header in headers])
- buf = buf.getvalue().encode()
- fname = f'LMCG_BBH_TRADES_P.{obj["Client Reference Number"].replace("_", "")}.csv'
- dest = DAILY_DIR / str(datetime.date.today()) / fname
- sftp = SftpClient.from_creds("bbh")
- sftp.put(buf, fname)
- dest.write_bytes(buf)
-
-
-if __name__ == "__main__":
- import argparse
-
- parser = argparse.ArgumentParser(description="Upload trades to BBH")
- parser.add_argument(
- "date",
- nargs="?",
- type=datetime.date.fromisoformat,
- default=(datetime.date.today() - datetime.timedelta(days=7)),
- )
- args = parser.parse_args()
- conn = dbconn("dawndb")
- with conn.cursor() as c:
- sql_query = "SELECT bond_trades.* FROM bond_trades LEFT JOIN counterparties ON cp_code=code WHERE cash_counterparty AND trade_date >= '2022-04-05' AND trade_date >=%s AND fund='BRINKER' and faceamount is not null and faceamount >0;"
- c.execute(
- sql_query, (args.date,)
- ) # We don't want to upload trades before 2022-04-05 so we're filtering on the trade_date twice
-
- for row in c:
- obj = row._asdict()
- obj = _serialize(obj)
- df = pd.DataFrame(
- obj,
- index=[
- "i",
- ],
- )
- try:
- df.to_sql(
- "bbh_bond_upload", dawn_engine, index=False, if_exists="append"
- )
- except IntegrityError:
- conn.rollback()
- else:
- process_upload(obj)
diff --git a/python/upload_umb_bonds.py b/python/upload_umb_bonds.py
deleted file mode 100644
index 9878a3c2..00000000
--- a/python/upload_umb_bonds.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import argparse
-import datetime
-from serenitas.ops.trade_dataclasses import BondDeal
-from serenitas.ops.funds import UMB
-from serenitas.ops.custodians import upload_to_umb
-
-parser = argparse.ArgumentParser()
-parser.add_argument(
- "trade_date",
- type=datetime.date.fromisoformat,
- default=datetime.date.today(),
- nargs="?",
-)
-parser.add_argument(
- "-n",
- "--no-upload",
- action="store_true",
- help="do not upload just drop in the daily directory",
- default=False,
-)
-args = parser.parse_args()
-upload_to_umb(not args.no_upload, trade_date=args.trade_date, fund="SERCGMAST")