Diffstat (limited to 'python')
-rw-r--r--  python/bbg_helpers.py            101
-rw-r--r--  python/bbg_index_quotes.py        54
-rw-r--r--  python/bbg_prices.py             101
-rw-r--r--  python/calibrate_tranches_BC.py  173
-rw-r--r--  python/cds_curve.py               81
-rw-r--r--  python/cds_rebook.py              99
-rw-r--r--  python/exchange.py                37
-rw-r--r--  python/external_deriv_marks.py    30
-rw-r--r--  python/globeop_reports.py        292
-rw-r--r--  python/gmail_helpers.py          171
-rw-r--r--  python/handle_default.py          76
-rw-r--r--  python/load_globeop_report.py    204
-rw-r--r--  python/load_refentity.py         168
-rw-r--r--  python/markit_red.py             121
-rw-r--r--  python/parse_citi_pdf.py          43
-rw-r--r--  python/parse_gs.py               122
-rw-r--r--  python/position.py               355
-rw-r--r--  python/process_queue.py          939
-rw-r--r--  python/sftp.py                    19
-rw-r--r--  python/swaption_quotes.py        101
-rw-r--r--  python/task_runner.py             16
-rw-r--r--  python/test_hyoption.py           16
22 files changed, 2089 insertions, 1230 deletions
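
The diffs below are dominated by mechanical re-formatting: single quotes become double quotes, long calls are wrapped one argument per line, and top-level definitions gain separating blank lines. This is the style an auto-formatter such as Black produces; as an illustration only (the commit does not say how the changes were generated), a pass like this could be reproduced with:

    # A sketch, assuming the `black` package is installed; the commit does not
    # state which tool (if any) produced these changes.
    import black
    from pathlib import Path

    # Re-format every module under python/ in place. black.format_str() returns
    # the re-formatted source; Mode() is Black's default style (88-character
    # lines, double-quoted strings), which matches the style seen in this diff.
    for py_file in Path("python").glob("*.py"):
        source = py_file.read_text()
        formatted = black.format_str(source, mode=black.Mode())
        if formatted != source:
            py_file.write_text(formatted)

In practice the same result comes from running the black command-line tool on the directory.
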
diff --git a/python/bbg_helpers.py b/python/bbg_helpers.py
index 6205f7ab..1abac308 100644
--- a/python/bbg_helpers.py
+++ b/python/bbg_helpers.py
@@ -7,7 +7,8 @@ import datetime
logger = logging.getLogger(__name__)
-BBG_IP = ["guillaume-pc", "edwin-pc", '192.168.0.10', '192.168.0.12']
+BBG_IP = ["guillaume-pc", "edwin-pc", "192.168.0.10", "192.168.0.12"]
+
@contextmanager
def init_bbg_session(ip_list, port=8194):
@@ -33,8 +34,9 @@ def init_bbg_session(ip_list, port=8194):
finally:
session.stop()
+
def append_overrides(request, d):
- overrides = request.getElement('overrides')
+ overrides = request.getElement("overrides")
for k, v in d.items():
o = overrides.appendElement()
o.setElement("fieldId", k)
@@ -47,7 +49,7 @@ def append_overrides(request, d):
def event_loop(session, request):
session.sendRequest(request)
# Process received events
- while(True):
+ while True:
# We provide timeout to give the chance to Ctrl+C handling:
ev = session.nextEvent(500)
if ev.eventType() in [blpapi.Event.PARTIAL_RESPONSE, blpapi.Event.RESPONSE]:
@@ -76,10 +78,14 @@ def get_pythonvalue(e):
def field_array_todf(field):
- df = pd.DataFrame.from_dict([{str(e.name()): get_pythonvalue(e) for e in f.elements()} \
- for f in field.values()])
+ df = pd.DataFrame.from_dict(
+ [
+ {str(e.name()): get_pythonvalue(e) for e in f.elements()}
+ for f in field.values()
+ ]
+ )
if "date" in df:
- df = df.set_index('date')
+ df = df.set_index("date")
return df
@@ -101,7 +107,9 @@ def process_reference_msg(msg):
if not field.isValid():
logger.info(f"Invalid field: {str(field)}")
elif field.isArray():
- row[str(field.name())] = field_array_todf(field) #to convert dates to timestamps
+ row[str(field.name())] = field_array_todf(
+ field
+ ) # to convert dates to timestamps
else:
row[str(field.name())] = get_pythonvalue(field)
data[securityName] = row
@@ -113,9 +121,17 @@ def process_intraday_tick_msg(msg):
return field_array_todf(tickdata)
-def retrieve_data(session, securities, fields=[], overrides={},
- start_date=None, end_date=None, frequency="DAILY",
- options={}, event_types=["TRADE"]):
+def retrieve_data(
+ session,
+ securities,
+ fields=[],
+ overrides={},
+ start_date=None,
+ end_date=None,
+ frequency="DAILY",
+ options={},
+ event_types=["TRADE"],
+):
"""
Convenience function to retrieve data from the Bloomberg API.
@@ -154,7 +170,7 @@ def retrieve_data(session, securities, fields=[], overrides={},
else:
request.set("security", securities)
else:
- if hasattr(securities, '__iter__'):
+ if hasattr(securities, "__iter__"):
for security in securities:
request.append("securities", security)
else:
@@ -171,12 +187,12 @@ def retrieve_data(session, securities, fields=[], overrides={},
if start_date:
if request.asElement().name() == blpapi.Name("IntradayTickRequest"):
- start_date = timezone('America/New_York').localize(start_date)
- start_date = start_date.astimezone(timezone('GMT'))
+ start_date = timezone("America/New_York").localize(start_date)
+ start_date = start_date.astimezone(timezone("GMT"))
request.set("startDateTime", start_date)
if end_date:
- end_date = timezone('America/New_York').localize(end_date)
- end_date = end_date.astimezone(timezone('GMT'))
+ end_date = timezone("America/New_York").localize(end_date)
+ end_date = end_date.astimezone(timezone("GMT"))
request.set("endDateTime", end_date)
else:
request.set("startDate", "{:%Y%m%d}".format(start_date))
@@ -185,8 +201,8 @@ def retrieve_data(session, securities, fields=[], overrides={},
data = {}
for msg in event_loop(session, request):
- if msg.hasElement('responseError'):
- logger.error(msg.getElement('responseError').getElementAsString('message'))
+ if msg.hasElement("responseError"):
+ logger.error(msg.getElement("responseError").getElementAsString("message"))
continue
if msg.messageType() == blpapi.Name("ReferenceDataResponse"):
data.update(process_reference_msg(msg))
@@ -196,8 +212,8 @@ def retrieve_data(session, securities, fields=[], overrides={},
data.update(process_intraday_tick_msg(msg))
if request.asElement().name() == blpapi.Name("IntradayTickRequest"):
df = pd.DataFrame(data)
- if 'time' in df:
- df.time = df.time.dt.tz_convert('America/New_York')
+ if "time" in df:
+ df.time = df.time.dt.tz_convert("America/New_York")
return df
else:
return data
@@ -205,26 +221,37 @@ def retrieve_data(session, securities, fields=[], overrides={},
if __name__ == "__main__":
testdate = pd.datetime(2013, 1, 1)
- hist_securities = ['CADUSD Curncy', "EURUSD Curncy"]
- hist_fields = ['PX_LAST']
- securities = ['004421BW2 Mtge', '75157EAE2 Mtge', 'XS0295516776 Mtge']
- fields = ['CUR_CPN', 'START_ACC_DT']
+ hist_securities = ["CADUSD Curncy", "EURUSD Curncy"]
+ hist_fields = ["PX_LAST"]
+ securities = ["004421BW2 Mtge", "75157EAE2 Mtge", "XS0295516776 Mtge"]
+ fields = ["CUR_CPN", "START_ACC_DT"]
with init_bbg_session(BBG_IP) as session:
- hist_data = retrieve_data(session, securities, hist_fields, start_date=testdate.date(),
- frequency="MONTHLY")
- overrides={'SETTLE_DT': testdate}
+ hist_data = retrieve_data(
+ session,
+ securities,
+ hist_fields,
+ start_date=testdate.date(),
+ frequency="MONTHLY",
+ )
+ overrides = {"SETTLE_DT": testdate}
ref_data = retrieve_data(session, securities, fields, overrides=overrides)
struct_data = retrieve_data(session, securities, ["HIST_CASH_FLOW"])
- spx_ndx_monthly = retrieve_data(session, ["SPX Index", "NDX Index"],
- fields=["PX_LAST"],
- start_date=datetime.date(2012, 1, 1),
- options={'periodicityAdjustment': 'ACTUAL'},
- frequency="MONTHLY")
- trace_data = retrieve_data(session, "BNCMT 2007-1 A5@TRAC Mtge",
- start_date=datetime.datetime(2018, 5, 18, 9),
- end_date=datetime.datetime(2018, 9, 18, 9))
+ spx_ndx_monthly = retrieve_data(
+ session,
+ ["SPX Index", "NDX Index"],
+ fields=["PX_LAST"],
+ start_date=datetime.date(2012, 1, 1),
+ options={"periodicityAdjustment": "ACTUAL"},
+ frequency="MONTHLY",
+ )
+ trace_data = retrieve_data(
+ session,
+ "BNCMT 2007-1 A5@TRAC Mtge",
+ start_date=datetime.datetime(2018, 5, 18, 9),
+ end_date=datetime.datetime(2018, 9, 18, 9),
+ )
today = datetime.datetime.today()
yesterday = today - datetime.timedelta(days=1)
- tick_data = retrieve_data(session, "GOOG US Equity",
- start_date=yesterday,
- end_date=today)
+ tick_data = retrieve_data(
+ session, "GOOG US Equity", start_date=yesterday, end_date=today
+ )
diff --git a/python/bbg_index_quotes.py b/python/bbg_index_quotes.py
index b11d0395..b8f016f5 100644
--- a/python/bbg_index_quotes.py
+++ b/python/bbg_index_quotes.py
@@ -6,32 +6,44 @@ securities = {}
for series in range(9, 32):
for index_type in ["IG", "HY"]:
for t in [3, 5, 7, 10]:
- securities[f"CDX {index_type} CDSI S{series} {t}Y Corp"] = \
- (series, index_type, f"{t}yr")
+ securities[f"CDX {index_type} CDSI S{series} {t}Y Corp"] = (
+ series,
+ index_type,
+ f"{t}yr",
+ )
for series in range(10, 31):
for index_type in ["EUR", "XOVER"]:
for t in [3, 5, 7, 10]:
- securities[f"ITRX {index_type} CDSI S{series} {t}Y Corp"] = \
- (series, index_type, f"{t}yr")
+ securities[f"ITRX {index_type} CDSI S{series} {t}Y Corp"] = (
+ series,
+ index_type,
+ f"{t}yr",
+ )
-place_holders = ",".join(['%s'] * 7)
-sql_str_price = ("INSERT INTO index_quotes_pre"
- "(date, index, series, version, tenor, close_price, source)"
- f" VALUES({place_holders}) ON CONFLICT DO NOTHING")
-sql_str_spread = ("INSERT INTO index_quotes_pre"
- "(date, index, series, version, tenor, close_spread, source)"
- f" VALUES({place_holders}) ON CONFLICT DO NOTHING")
+place_holders = ",".join(["%s"] * 7)
+sql_str_price = (
+ "INSERT INTO index_quotes_pre"
+ "(date, index, series, version, tenor, close_price, source)"
+ f" VALUES({place_holders}) ON CONFLICT DO NOTHING"
+)
+sql_str_spread = (
+ "INSERT INTO index_quotes_pre"
+ "(date, index, series, version, tenor, close_spread, source)"
+ f" VALUES({place_holders}) ON CONFLICT DO NOTHING"
+)
start_date = datetime.date.today() - datetime.timedelta(days=7)  # one week of overlap
-conn = dbconn('serenitasdb')
+conn = dbconn("serenitasdb")
with init_bbg_session(BBG_IP) as session:
d = retrieve_data(session, securities.keys(), fields=["TICKER", "VERSION"])
- ticker_mapping = {v['TICKER']: securities[k] + (v['VERSION'],)
- for k, v in d.items() if v}
+ ticker_mapping = {
+ v["TICKER"]: securities[k] + (v["VERSION"],) for k, v in d.items() if v
+ }
for pcs in ["MSG1", "CMAN", "CBGN"]:
securities = [f"{e['TICKER']} {pcs} Curncy" for e in d.values() if e]
- d2 = retrieve_data(session, securities, fields=["PX_LAST"],
- start_date=start_date)
+ d2 = retrieve_data(
+ session, securities, fields=["PX_LAST"], start_date=start_date
+ )
with conn.cursor() as c:
for k, v in d2.items():
ticker = k.split()[0]
@@ -45,7 +57,11 @@ with init_bbg_session(BBG_IP) as session:
else:
sql_str = sql_str_spread
if not v.empty:
- c.executemany(sql_str,
- [(t[0], index, series, version, tenor, t[1], pcs)
- for t in v.itertuples()])
+ c.executemany(
+ sql_str,
+ [
+ (t[0], index, series, version, tenor, t[1], pcs)
+ for t in v.itertuples()
+ ],
+ )
conn.commit()
diff --git a/python/bbg_prices.py b/python/bbg_prices.py
index e9b549ea..dbd95016 100644
--- a/python/bbg_prices.py
+++ b/python/bbg_prices.py
@@ -4,54 +4,95 @@ import numpy as np
import pandas as pd
from psycopg2.extensions import register_adapter, AsIs
+
register_adapter(type(pd.NaT), lambda nat: AsIs(None))
-engine = create_engine('postgresql://et_user@debian/ET')
+engine = create_engine("postgresql://et_user@debian/ET")
-fields_update = ["LN_ISSUE_STATUS", "AMT_OUTSTANDING", "PX_LAST","LAST_UPDATE_DT",
- "LN_CURRENT_MARGIN", "DEFAULTED", "DEFAULT_DATE",
- "CALLED", "CALLED_DT", "PRICING_SOURCE"]
+fields_update = [
+ "LN_ISSUE_STATUS",
+ "AMT_OUTSTANDING",
+ "PX_LAST",
+ "LAST_UPDATE_DT",
+ "LN_CURRENT_MARGIN",
+ "DEFAULTED",
+ "DEFAULT_DATE",
+ "CALLED",
+ "CALLED_DT",
+ "PRICING_SOURCE",
+]
# append securities to request
-cusips = pd.read_sql_query("SELECT id_bb_unique, substring(id_bb_unique from 3) AS cusip " \
- "FROM bloomberg_corp_ref " \
- "WHERE (status is Null or status NOT IN ('REFINANCED','RETIRED', 'REPLACED')) "\
- "AND not called", engine, index_col='cusip')
+cusips = pd.read_sql_query(
+ "SELECT id_bb_unique, substring(id_bb_unique from 3) AS cusip "
+ "FROM bloomberg_corp_ref "
+ "WHERE (status is Null or status NOT IN ('REFINANCED','RETIRED', 'REPLACED')) "
+ "AND not called",
+ engine,
+ index_col="cusip",
+)
securities = ["{0} Corp".format(cusip) for cusip in cusips.index]
with init_bbg_session(BBG_IP) as session:
data = retrieve_data(session, securities, fields_update)
-df = pd.DataFrame.from_dict(data, orient='index')
-df['security'] = df.index.str.slice(0,9)
-df.set_index(['security'], inplace=True)
-df['ID_BB_UNIQUE'] = cusips['id_bb_unique']
+df = pd.DataFrame.from_dict(data, orient="index")
+df["security"] = df.index.str.slice(0, 9)
+df.set_index(["security"], inplace=True)
+df["ID_BB_UNIQUE"] = cusips["id_bb_unique"]
df.reset_index(inplace=True)
-to_insert = df[['DEFAULTED', 'DEFAULT_DATE', 'CALLED', 'CALLED_DT', 'LN_ISSUE_STATUS', 'ID_BB_UNIQUE']]
+to_insert = df[
+ [
+ "DEFAULTED",
+ "DEFAULT_DATE",
+ "CALLED",
+ "CALLED_DT",
+ "LN_ISSUE_STATUS",
+ "ID_BB_UNIQUE",
+ ]
+]
to_insert = to_insert.where(to_insert.notnull(), None)
conn = engine.raw_connection()
with conn.cursor() as c:
- c.executemany("UPDATE bloomberg_corp_ref SET defaulted = %(DEFAULTED)s, " \
- "default_date = %(DEFAULT_DATE)s, called= %(CALLED)s, called_date = %(CALLED_DT)s, " \
- "status = %(LN_ISSUE_STATUS)s WHERE id_bb_unique=%(ID_BB_UNIQUE)s",
- to_insert.to_dict('records'))
+ c.executemany(
+ "UPDATE bloomberg_corp_ref SET defaulted = %(DEFAULTED)s, "
+ "default_date = %(DEFAULT_DATE)s, called= %(CALLED)s, called_date = %(CALLED_DT)s, "
+ "status = %(LN_ISSUE_STATUS)s WHERE id_bb_unique=%(ID_BB_UNIQUE)s",
+ to_insert.to_dict("records"),
+ )
conn.commit()
-currentdata = pd.read_sql_query("SELECT id_bb_unique, pricingdate from bloomberg_corp",
- engine,
- parse_dates=["pricingdate"],
- index_col=['id_bb_unique', 'pricingdate'])
-#no need to insert empty prices
-df.dropna(subset=['PX_LAST', 'LAST_UPDATE_DT'], inplace=True)
-df.set_index(['ID_BB_UNIQUE', 'LAST_UPDATE_DT'], inplace=True)
+currentdata = pd.read_sql_query(
+ "SELECT id_bb_unique, pricingdate from bloomberg_corp",
+ engine,
+ parse_dates=["pricingdate"],
+ index_col=["id_bb_unique", "pricingdate"],
+)
+# no need to insert empty prices
+df.dropna(subset=["PX_LAST", "LAST_UPDATE_DT"], inplace=True)
+df.set_index(["ID_BB_UNIQUE", "LAST_UPDATE_DT"], inplace=True)
df = df.ix[df.index.difference(currentdata.index)]
-df.index.names = ['ID_BB_UNIQUE', 'LAST_UPDATE_DT']
+df.index.names = ["ID_BB_UNIQUE", "LAST_UPDATE_DT"]
df.reset_index(inplace=True)
-to_insert = df[['ID_BB_UNIQUE','LAST_UPDATE_DT','PX_LAST','LN_CURRENT_MARGIN',
- 'AMT_OUTSTANDING', 'PRICING_SOURCE']]
-to_insert.columns = ['id_bb_unique', 'pricingdate', 'price', 'loan_margin',
- 'amount_outstanding', 'source']
-to_insert.to_sql('bloomberg_corp', engine, if_exists='append', index=False)
+to_insert = df[
+ [
+ "ID_BB_UNIQUE",
+ "LAST_UPDATE_DT",
+ "PX_LAST",
+ "LN_CURRENT_MARGIN",
+ "AMT_OUTSTANDING",
+ "PRICING_SOURCE",
+ ]
+]
+to_insert.columns = [
+ "id_bb_unique",
+ "pricingdate",
+ "price",
+ "loan_margin",
+ "amount_outstanding",
+ "source",
+]
+to_insert.to_sql("bloomberg_corp", engine, if_exists="append", index=False)
diff --git a/python/calibrate_tranches_BC.py b/python/calibrate_tranches_BC.py
index b6d7a37f..8fde82e9 100644
--- a/python/calibrate_tranches_BC.py
+++ b/python/calibrate_tranches_BC.py
@@ -11,9 +11,11 @@ import argparse
def get_lastdate(conn, index, series, tenor):
- sql_str = ("SELECT (max(date) AT TIME ZONE 'America/New_York')::date + 1 "
- "AS date FROM risk_numbers "
- "WHERE index=%s and series = %s and tenor = %s")
+ sql_str = (
+ "SELECT (max(date) AT TIME ZONE 'America/New_York')::date + 1 "
+ "AS date FROM risk_numbers "
+ "WHERE index=%s and series = %s and tenor = %s"
+ )
with conn.cursor() as c:
c.execute(sql_str, (index, series, tenor))
date, = c.fetchone()
@@ -22,31 +24,47 @@ def get_lastdate(conn, index, series, tenor):
def build_sql_str(df):
- cols = ','.join(df.columns)
- cols_ex_tranche_id = ','.join([c for c in df.columns if c != 'tranche_id'])
- cols_excluded = ','.join([f"excluded.{c}" for c in df.columns if c != 'tranche_id'])
+ cols = ",".join(df.columns)
+ cols_ex_tranche_id = ",".join([c for c in df.columns if c != "tranche_id"])
+ cols_excluded = ",".join([f"excluded.{c}" for c in df.columns if c != "tranche_id"])
place_holders = ",".join([f"%({c})s" for c in df.columns])
- sql_str = (f"INSERT INTO tranche_risk({cols}) "
- f"VALUES({place_holders}) ON CONFLICT (tranche_id) DO "
- f"UPDATE SET ({cols_ex_tranche_id}) = ({cols_excluded})")
+ sql_str = (
+ f"INSERT INTO tranche_risk({cols}) "
+ f"VALUES({place_holders}) ON CONFLICT (tranche_id) DO "
+ f"UPDATE SET ({cols_ex_tranche_id}) = ({cols_excluded})"
+ )
return sql_str
if __name__ == "__main__":
from utils import SerenitasFileHandler
- logger = logging.getLogger('tranche_calib')
+
+ logger = logging.getLogger("tranche_calib")
parser = argparse.ArgumentParser()
- parser.add_argument("-u", "--update", action="store_true", default=False,
- help="Update from the last run date [default %default]")
- parser.add_argument("-c", "--config", metavar="config_file",
- help="Runs the list of indices provided in CONFIG_FILE")
+ parser.add_argument(
+ "-u",
+ "--update",
+ action="store_true",
+ default=False,
+ help="Update from the last run date [default %default]",
+ )
+ parser.add_argument(
+ "-c",
+ "--config",
+ metavar="config_file",
+ help="Runs the list of indices provided in CONFIG_FILE",
+ )
parser.add_argument("-i", "--index", help="Index name we want to run")
- parser.add_argument("--tenor", default="5yr", help="Tenor we want to run [default '5yr']")
- parser.add_argument("--until", default=pd.Timestamp.now()-BDay(),
- type=lambda s: pd.Timestamp(s))
- parser.add_argument("--start_from", default=None,
- type=lambda s: pd.Timestamp(s))
- parser.add_argument("-d", "--debug", action="store_true", help="more verbose logging")
+ parser.add_argument(
+ "--tenor", default="5yr", help="Tenor we want to run [default '5yr']"
+ )
+ parser.add_argument(
+ "--until", default=pd.Timestamp.now() - BDay(), type=lambda s: pd.Timestamp(s)
+ )
+ parser.add_argument("--start_from", default=None, type=lambda s: pd.Timestamp(s))
+ parser.add_argument(
+ "-d", "--debug", action="store_true", help="more verbose logging"
+ )
args = parser.parse_args()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
CODE_DIR = Path(os.environ["CODE_DIR"])
@@ -57,48 +75,49 @@ if __name__ == "__main__":
handler.setFormatter(SerenitasFileHandler._formatter)
logger.addHandler(handler)
- start_dates = {# 'hy10': datetime.date(2014, 8, 11),
- # 'hy15': datetime.date(2014, 6, 10),
- # 'hy17': datetime.date(2013, 1, 1),
- 'hy19': datetime.date(2013, 2, 1),
- 'hy21': datetime.date(2013, 10, 4),
- 'hy23': datetime.date(2014, 10, 16),
- 'hy25': datetime.date(2015, 10, 1),
- 'hy27': datetime.date(2016, 10, 4),
- 'hy29': datetime.date(2017, 10, 3),
- 'hy31': datetime.date(2018, 10, 2),
- 'ig9': datetime.date(2013, 1, 1),
- 'ig19': datetime.date(2013, 5, 1),
- 'ig21': datetime.date(2013, 9, 26),
- 'ig23': datetime.date(2014, 10, 14),
- 'ig25': datetime.date(2015, 9, 22),
- 'ig27': datetime.date(2016, 9, 27),
- 'ig29': datetime.date(2017, 9, 26),
- 'ig31': datetime.date(2018, 9, 25),
- 'xo22': datetime.date(2014, 10, 20),
- 'xo24': datetime.date(2015, 9, 28),
- 'xo26': datetime.date(2016, 9, 27),
- 'xo28': datetime.date(2017, 9, 28),
- 'eu9': datetime.date(2014, 9, 15),
- 'eu19': datetime.date(2013, 4, 3),
- 'eu21': datetime.date(2014, 3, 27),
- 'eu22': datetime.date(2014, 10, 22),
- 'eu24': datetime.date(2015, 9, 23),
- 'eu26': datetime.date(2016, 9, 27),
- 'eu28': datetime.date(2017, 9, 28),
- 'eu30': datetime.date(2018, 9, 25),
- 'xo30': datetime.date(2018, 9, 25)}
+ start_dates = { # 'hy10': datetime.date(2014, 8, 11),
+ # 'hy15': datetime.date(2014, 6, 10),
+ # 'hy17': datetime.date(2013, 1, 1),
+ "hy19": datetime.date(2013, 2, 1),
+ "hy21": datetime.date(2013, 10, 4),
+ "hy23": datetime.date(2014, 10, 16),
+ "hy25": datetime.date(2015, 10, 1),
+ "hy27": datetime.date(2016, 10, 4),
+ "hy29": datetime.date(2017, 10, 3),
+ "hy31": datetime.date(2018, 10, 2),
+ "ig9": datetime.date(2013, 1, 1),
+ "ig19": datetime.date(2013, 5, 1),
+ "ig21": datetime.date(2013, 9, 26),
+ "ig23": datetime.date(2014, 10, 14),
+ "ig25": datetime.date(2015, 9, 22),
+ "ig27": datetime.date(2016, 9, 27),
+ "ig29": datetime.date(2017, 9, 26),
+ "ig31": datetime.date(2018, 9, 25),
+ "xo22": datetime.date(2014, 10, 20),
+ "xo24": datetime.date(2015, 9, 28),
+ "xo26": datetime.date(2016, 9, 27),
+ "xo28": datetime.date(2017, 9, 28),
+ "eu9": datetime.date(2014, 9, 15),
+ "eu19": datetime.date(2013, 4, 3),
+ "eu21": datetime.date(2014, 3, 27),
+ "eu22": datetime.date(2014, 10, 22),
+ "eu24": datetime.date(2015, 9, 23),
+ "eu26": datetime.date(2016, 9, 27),
+ "eu28": datetime.date(2017, 9, 28),
+ "eu30": datetime.date(2018, 9, 25),
+ "xo30": datetime.date(2018, 9, 25),
+ }
- serenitas_conn = dbconn('serenitasdb')
+ serenitas_conn = dbconn("serenitasdb")
if args.config is None:
if args.index is None:
raise ValueError("Please provide an index to run")
- config = {'runs': [(args.index, args.tenor)]}
+ config = {"runs": [(args.index, args.tenor)]}
else:
with (CODE_DIR / "etc" / args.config).open("r") as fh:
config = load(fh)
- for index, tenor in config['runs']:
+ for index, tenor in config["runs"]:
begin_date = None
index, series = index[:2].upper(), int(index[2:])
if args.start_from is not None:
@@ -123,7 +142,9 @@ if __name__ == "__main__":
logger.debug(f"calibrating for {d.date()}")
try:
if tranche_index is None:
- tranche_index = TrancheBasket(index, series, tenor, value_date=d.date())
+ tranche_index = TrancheBasket(
+ index, series, tenor, value_date=d.date()
+ )
else:
tranche_index.value_date = d.date()
except (RuntimeError, ValueError) as e:
@@ -141,31 +162,37 @@ if __name__ == "__main__":
logger.error(e)
continue
- df = pd.concat([tranche_index.tranche_deltas(),
- tranche_index.tranche_fwd_deltas(),
- tranche_index.tranche_durations(),
- tranche_index.tranche_EL(),
- tranche_index.tranche_spreads()], axis=1)
+ df = pd.concat(
+ [
+ tranche_index.tranche_deltas(),
+ tranche_index.tranche_fwd_deltas(),
+ tranche_index.tranche_durations(),
+ tranche_index.tranche_EL(),
+ tranche_index.tranche_spreads(),
+ ],
+ axis=1,
+ )
try:
- df['theta'] = tranche_index.tranche_thetas(method="TLP")
+ df["theta"] = tranche_index.tranche_thetas(method="TLP")
except ValueError:
- df['theta'] = None
+ df["theta"] = None
- df['index_duration'], df['index_expected_loss'], df['index_price'] = \
- tranche_index.index_pv()
- df['index_expected_loss'] *= -1
- df['index_duration'] -= tranche_index.accrued()
- df['index_basis'] = tranche_index.tweaks[0]
- df['index_theta'] = tranche_index.theta()[tenor]
- df['tranche_id'] = tranche_index.tranche_quotes.id.values
- df['corr_at_detach'] = tranche_index.rho[1:]
- df['corr01'] = tranche_index.tranche_corr01()
- del df['fwd_gamma']
+ df["index_duration"], df["index_expected_loss"], df[
+ "index_price"
+ ] = tranche_index.index_pv()
+ df["index_expected_loss"] *= -1
+ df["index_duration"] -= tranche_index.accrued()
+ df["index_basis"] = tranche_index.tweaks[0]
+ df["index_theta"] = tranche_index.theta()[tenor]
+ df["tranche_id"] = tranche_index.tranche_quotes.id.values
+ df["corr_at_detach"] = tranche_index.rho[1:]
+ df["corr01"] = tranche_index.tranche_corr01()
+ del df["fwd_gamma"]
data[d] = df
if data:
data = pd.concat(data)
sql_str = build_sql_str(data)
with serenitas_conn.cursor() as c:
- c.executemany(sql_str, data.to_dict(orient='record'))
+ c.executemany(sql_str, data.to_dict(orient="record"))
serenitas_conn.commit()
diff --git a/python/cds_curve.py b/python/cds_curve.py
index 2e0cf73f..5f8ff1cf 100644
--- a/python/cds_curve.py
+++ b/python/cds_curve.py
@@ -7,41 +7,52 @@ import pandas as pd
from utils.db import dbconn
-def all_curves_pv(curves, today_date, jp_yc, start_date, step_in_date, value_date, maturities):
+def all_curves_pv(
+ curves, today_date, jp_yc, start_date, step_in_date, value_date, maturities
+):
r = {}
for d in maturities:
tenor = {}
- coupon_leg = FeeLeg(start_date, d, True, 1., 1.)
+ coupon_leg = FeeLeg(start_date, d, True, 1.0, 1.0)
default_leg = ContingentLeg(start_date, d, True)
accrued = coupon_leg.accrued(step_in_date)
tickers = []
data = []
for sc in curves:
- coupon_leg_pv = coupon_leg.pv(today_date, step_in_date, value_date, jp_yc, sc, False)
- default_leg_pv = default_leg.pv(today_date, step_in_date, value_date,
- jp_yc, sc, 0.4)
+ coupon_leg_pv = coupon_leg.pv(
+ today_date, step_in_date, value_date, jp_yc, sc, False
+ )
+ default_leg_pv = default_leg.pv(
+ today_date, step_in_date, value_date, jp_yc, sc, 0.4
+ )
tickers.append(sc.ticker)
- data.append((coupon_leg_pv-accrued, default_leg_pv))
- r[pd.Timestamp(d)] = pd.DataFrame.from_records(data,
- index=tickers,
- columns=['duration', 'protection_pv'])
+ data.append((coupon_leg_pv - accrued, default_leg_pv))
+ r[pd.Timestamp(d)] = pd.DataFrame.from_records(
+ data, index=tickers, columns=["duration", "protection_pv"]
+ )
return pd.concat(r, axis=1).swaplevel(axis=1).sort_index(axis=1, level=0)
-def calibrate_portfolio(index_type, series, tenors=['3yr', '5yr', '7yr', '10yr'],
- start_date=None):
+def calibrate_portfolio(
+ index_type, series, tenors=["3yr", "5yr", "7yr", "10yr"], start_date=None
+):
try:
index = MarkitBasketIndex(index_type, series, tenors)
except ValueError:
return
if start_date:
index.index_quotes = index.index_quotes[start_date:]
- for value_date, v in index.index_quotes.groupby('date')['id']:
+ for value_date, v in index.index_quotes.groupby("date")["id"]:
index.value_date = value_date
index.tweak()
- df = pd.concat([index.theta(),
- index.duration(),
- pd.Series(index.tweaks, index=tenors, name='tweak')], axis=1)
+ df = pd.concat(
+ [
+ index.theta(),
+ index.duration(),
+ pd.Series(index.tweaks, index=tenors, name="tweak"),
+ ],
+ axis=1,
+ )
for (_, t), id in v.items():
yield (id, df.loc[t])
@@ -51,26 +62,33 @@ if __name__ == "__main__":
import argparse
import logging
import os
+
parser = argparse.ArgumentParser()
- parser.add_argument('index', help="index type (IG, HY, EU or XO)")
- parser.add_argument('series', help="series", type=int)
- parser.add_argument('--latest', required=False, action="store_true")
+ parser.add_argument("index", help="index type (IG, HY, EU or XO)")
+ parser.add_argument("series", help="series", type=int)
+ parser.add_argument("--latest", required=False, action="store_true")
args = parser.parse_args()
index, series = args.index, args.series
- conn = dbconn('serenitasdb')
+ conn = dbconn("serenitasdb")
if args.latest:
with conn.cursor() as c:
- c.execute("SELECT max(date) FROM index_quotes_pre "
- "RIGHT JOIN index_risk2 USING (id) "
- "WHERE index=%s AND series=%s "
- "AND tenor in ('3yr', '5yr', '7yr', '10yr')",
- (index, series))
+ c.execute(
+ "SELECT max(date) FROM index_quotes_pre "
+ "RIGHT JOIN index_risk2 USING (id) "
+ "WHERE index=%s AND series=%s "
+ "AND tenor in ('3yr', '5yr', '7yr', '10yr')",
+ (index, series),
+ )
start_date, = c.fetchone()
else:
start_date = None
- fh = logging.FileHandler(filename=os.path.join(os.getenv("LOG_DIR"), "index_curves.log"))
- formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+ fh = logging.FileHandler(
+ filename=os.path.join(os.getenv("LOG_DIR"), "index_curves.log")
+ )
+ formatter = logging.Formatter(
+ "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
+ )
fh.setFormatter(formatter)
loggers = [logging.getLogger("analytics"), logging.getLogger("index_curves")]
for logger in loggers:
@@ -78,12 +96,13 @@ if __name__ == "__main__":
logger.addHandler(fh)
loggers[1].info(f"filling {index} {series}")
- g = calibrate_portfolio(index, series, ['3yr', '5yr', '7yr', '10yr'],
- start_date)
+ g = calibrate_portfolio(index, series, ["3yr", "5yr", "7yr", "10yr"], start_date)
with conn.cursor() as c:
for id, t in g:
- c.execute("INSERT INTO index_risk2 VALUES(%s, %s, %s, %s) ON CONFLICT (id) "
- "DO UPDATE SET theta=%s, duration=%s, tweak=%s",
- (id,) + tuple(t) + tuple(t))
+ c.execute(
+ "INSERT INTO index_risk2 VALUES(%s, %s, %s, %s) ON CONFLICT (id) "
+ "DO UPDATE SET theta=%s, duration=%s, tweak=%s",
+ (id,) + tuple(t) + tuple(t),
+ )
conn.commit()
conn.close()
diff --git a/python/cds_rebook.py b/python/cds_rebook.py
index d4c4eb56..fa826913 100644
--- a/python/cds_rebook.py
+++ b/python/cds_rebook.py
@@ -5,6 +5,7 @@ from pyisda.date import default_accrual, previous_twentieth
from analytics.index import CreditIndex
from copy import copy
+
def get_outstanding_positions(trade_date):
r = dawn_engine.execute(
"SELECT security_id, notional, folder, nextredindexcode, currency, "
@@ -13,65 +14,83 @@ def get_outstanding_positions(trade_date):
"JOIN index_version_markit "
"ON a.security_id=index_version_markit.redindexcode "
"WHERE nextredindexcode IS NOT NULL",
- (trade_date,))
+ (trade_date,),
+ )
return r
+
def default_adjustment(company_id, end_date):
- r = serenitas_engine.execute("SELECT recovery, event_date, auction_date FROM defaulted "
- "WHERE id=%s", (company_id,))
+ r = serenitas_engine.execute(
+ "SELECT recovery, event_date, auction_date FROM defaulted " "WHERE id=%s",
+ (company_id,),
+ )
recovery, event_date, auction_date = next(r)
fee = 1 - recovery
start_date = previous_twentieth(event_date)
- accrual_days, _ = default_accrual(auction_date, event_date,
- start_date, end_date, 1., 1.)
+ accrual_days, _ = default_accrual(
+ auction_date, event_date, start_date, end_date, 1.0, 1.0
+ )
return accrual_days, fee
-PORTFOLIO = {"HYOPTDEL": "OPTIONS",
- "HEDGE_MBS": "MORTGAGES"}
+
+PORTFOLIO = {"HYOPTDEL": "OPTIONS", "HEDGE_MBS": "MORTGAGES"}
+
def rebook(trade_date, company_id):
upfront_settle_date = trade_date + 3 * BDay()
effective_date = trade_date + datetime.timedelta(days=1)
for r in get_outstanding_positions(trade_date):
- accrual_days, fee = default_adjustment(company_id, r['maturity'])
- index_new = CreditIndex(redcode=r['nextredindexcode'],
- maturity=r['maturity'],
- value_date=trade_date,
- notional=r['notional'])
+ accrual_days, fee = default_adjustment(company_id, r["maturity"])
+ index_new = CreditIndex(
+ redcode=r["nextredindexcode"],
+ maturity=r["maturity"],
+ value_date=trade_date,
+ notional=r["notional"],
+ )
- adj = (fee - accrual_days * index_new.fixed_rate * 1e-4 /360) * \
- r['notional'] * (r['indexfactor'] - index_new.factor)
+ adj = (
+ (fee - accrual_days * index_new.fixed_rate * 1e-4 / 360)
+ * r["notional"]
+ * (r["indexfactor"] - index_new.factor)
+ )
index_new.mark()
- trade_new = {'action': 'NEW',
- 'portfolio': PORTFOLIO[r['folder']],
- 'folder': r['folder'],
- 'cp_code': 'INTERCO',
- 'custodian': 'NONE',
- 'trade_date': trade_date,
- 'effective_date': effective_date,
- 'maturity': r['maturity'],
- 'currency': r['currency'],
- 'payment_rolldate': 'Following',
- 'notional': abs(r['notional']),
- 'fixed_rate': index_new.fixed_rate / 100,
- 'day_count': 'ACT/360',
- 'frequency': 4,
- 'protection': index_new.direction,
- 'security_id': r['nextredindexcode'],
- 'security_desc': f"CDX {index_new.index_type} CDSI S{index_new.series} 5Y",
- 'upfront': index_new.pv,
- 'upfront_settle_date': upfront_settle_date,
- 'swap_type': 'CD_INDEX'}
+ trade_new = {
+ "action": "NEW",
+ "portfolio": PORTFOLIO[r["folder"]],
+ "folder": r["folder"],
+ "cp_code": "INTERCO",
+ "custodian": "NONE",
+ "trade_date": trade_date,
+ "effective_date": effective_date,
+ "maturity": r["maturity"],
+ "currency": r["currency"],
+ "payment_rolldate": "Following",
+ "notional": abs(r["notional"]),
+ "fixed_rate": index_new.fixed_rate / 100,
+ "day_count": "ACT/360",
+ "frequency": 4,
+ "protection": index_new.direction,
+ "security_id": r["nextredindexcode"],
+ "security_desc": f"CDX {index_new.index_type} CDSI S{index_new.series} 5Y",
+ "upfront": index_new.pv,
+ "upfront_settle_date": upfront_settle_date,
+ "swap_type": "CD_INDEX",
+ }
trade_prev = copy(trade_new)
- trade_prev['protection'] = "Seller" if trade_new['protection'] == "Buyer" else "Buyer"
- trade_prev['upfront'] = adj - index_new.pv
- trade_prev['security_id'] = r['security_id']
- sql_str = (f"INSERT INTO cds({','.join(trade_new.keys())}) "
- f"VALUES({','.join(['%s'] * len(trade_new))})")
+ trade_prev["protection"] = (
+ "Seller" if trade_new["protection"] == "Buyer" else "Buyer"
+ )
+ trade_prev["upfront"] = adj - index_new.pv
+ trade_prev["security_id"] = r["security_id"]
+ sql_str = (
+ f"INSERT INTO cds({','.join(trade_new.keys())}) "
+ f"VALUES({','.join(['%s'] * len(trade_new))})"
+ )
dawn_engine.execute(sql_str, [trade_prev.values(), trade_new.values()])
+
if __name__ == "__main__":
# PKD
- #rebook(datetime.date(2019, 1, 24), 101148)
+ # rebook(datetime.date(2019, 1, 24), 101148)
# WINDSSE
rebook(datetime.date(2019, 4, 8), 36806879)
diff --git a/python/exchange.py b/python/exchange.py
index bb1b660c..fce17e2e 100644
--- a/python/exchange.py
+++ b/python/exchange.py
@@ -4,35 +4,40 @@ import json
def get_account(email_address):
- with open(Path('.credentials') / (email_address + '.json')) as fh:
+ with open(Path(".credentials") / (email_address + ".json")) as fh:
creds = json.load(fh)
credentials = Credentials(**creds)
- config = Configuration(server='autodiscover.lmcg.com', credentials=credentials)
- return Account(primary_smtp_address=email_address, config=config,
- autodiscover=False, access_type=DELEGATE)
+ config = Configuration(server="autodiscover.lmcg.com", credentials=credentials)
+ return Account(
+ primary_smtp_address=email_address,
+ config=config,
+ autodiscover=False,
+ access_type=DELEGATE,
+ )
+
class ExchangeMessage:
_account = get_account("ghorel@lmcg.com")
- def get_msgs(self, count=None,
- path=['GS', 'Swaptions'], **filters):
+ def get_msgs(self, count=None, path=["GS", "Swaptions"], **filters):
folder = self._account.inbox
for p in path:
folder /= p
folder = folder.filter(**filters)
if count:
- for msg in folder.all().order_by('-datetime_sent')[:count]:
+ for msg in folder.all().order_by("-datetime_sent")[:count]:
yield msg
else:
- for msg in folder.all().order_by('-datetime_sent'):
+ for msg in folder.all().order_by("-datetime_sent"):
yield msg
- def send_email(self, subject, body, to_recipients,
- cc_recipients):
- m = Message(account=self._account,
- folder=self._account.sent,
- subject=subject,
- body=body,
- to_recipients=to_recipients,
- cc_recipients=cc_recipients)
+ def send_email(self, subject, body, to_recipients, cc_recipients):
+ m = Message(
+ account=self._account,
+ folder=self._account.sent,
+ subject=subject,
+ body=body,
+ to_recipients=to_recipients,
+ cc_recipients=cc_recipients,
+ )
m.send_and_save()
diff --git a/python/external_deriv_marks.py b/python/external_deriv_marks.py
index 08d3418f..12cf9b41 100644
--- a/python/external_deriv_marks.py
+++ b/python/external_deriv_marks.py
@@ -7,17 +7,17 @@ from pathlib import Path
DAILY_DIR = Path(os.environ["DAILY_DIR"])
+
def gs_navs():
d = {}
for fname in (DAILY_DIR / "GS_reports").glob("Trade_Detail*.xls"):
try:
- df = pd.read_excel(fname, skiprows=9, skipfooter=77,
- index_col='Trade Id')
+ df = pd.read_excel(fname, skiprows=9, skipfooter=77, index_col="Trade Id")
except ValueError:
continue
- df['Trade Date'] = pd.to_datetime(df['Trade Date'])
- df = df[['Trade Date', 'Buy/Sell', 'Notional (USD)', 'NPV (USD)']]
- df.columns = ['trade_date', 'buy/sell', 'notional', 'nav']
+ df["Trade Date"] = pd.to_datetime(df["Trade Date"])
+ df = df[["Trade Date", "Buy/Sell", "Notional (USD)", "NPV (USD)"]]
+ df.columns = ["trade_date", "buy/sell", "notional", "nav"]
name = fname.name.replace("9972734", "")
m = re.match(r"[^\d]*(\d{2}_.{3}_\d{4})", name)
if m:
@@ -30,10 +30,12 @@ def gs_navs():
def ms_navs():
d = {}
for fname in (DAILY_DIR / "MS_reports").glob("Trade_Detail*.xls"):
- df = pd.read_excel(fname, index_col='trade_id')
+ df = pd.read_excel(fname, index_col="trade_id")
df.trade_date = pd.to_datetime(df.trade_date)
- df = df[['trade_date', 'pay_rec', 'notional_in_trade_ccy', 'exposure_in_rpt_ccy']]
- df.columns = ['trade_date', 'buy/sell', 'notional', 'nav']
+ df = df[
+ ["trade_date", "pay_rec", "notional_in_trade_ccy", "exposure_in_rpt_ccy"]
+ ]
+ df.columns = ["trade_date", "buy/sell", "notional", "nav"]
m = re.match(r"[^\d]*(\d{8})", fname.name)
if m:
date_string, = m.groups()
@@ -41,11 +43,15 @@ def ms_navs():
d[date] = df
return pd.concat(d)
+
if __name__ == "__main__":
for cp in ["MS", "GS"]:
df = globals()[f"{cp.lower()}_navs"]()
- with dbconn('dawndb') as conn:
+ with dbconn("dawndb") as conn:
with conn.cursor() as c:
- for k, v in df[['nav']].iterrows():
- c.execute("INSERT INTO external_marks_deriv "
- "VALUES(%s, %s, %s, %s) ON CONFLICT DO NOTHING", (*k, float(v), cp))
+ for k, v in df[["nav"]].iterrows():
+ c.execute(
+ "INSERT INTO external_marks_deriv "
+ "VALUES(%s, %s, %s, %s) ON CONFLICT DO NOTHING",
+ (*k, float(v), cp),
+ )
diff --git a/python/globeop_reports.py b/python/globeop_reports.py
index ffbd6a4a..b36d728f 100644
--- a/python/globeop_reports.py
+++ b/python/globeop_reports.py
@@ -10,122 +10,152 @@ import numpy as np
import datetime
-def get_monthly_pnl(group_by=['identifier']):
+def get_monthly_pnl(group_by=["identifier"]):
sql_string = "SELECT * FROM pnl_reports"
- df_pnl = pd.read_sql_query(sql_string, dawn_engine,
- parse_dates=['date'],
- index_col=['date'])
- df_pnl['identifier'] = df_pnl.invid.str.replace("_A$", "")
- pnl_cols = ['bookunrealmtm', 'bookrealmtm', 'bookrealincome', 'bookunrealincome', 'totalbookpl']
- monthend_pnl = df_pnl.groupby(pd.Grouper(freq='M'), group_keys=False).apply(lambda df: df.loc[df.index[-1]])
- return monthend_pnl.groupby(['date'] + group_by)[['mtd' + col for col in pnl_cols]].sum()
+ df_pnl = pd.read_sql_query(
+ sql_string, dawn_engine, parse_dates=["date"], index_col=["date"]
+ )
+ df_pnl["identifier"] = df_pnl.invid.str.replace("_A$", "")
+ pnl_cols = [
+ "bookunrealmtm",
+ "bookrealmtm",
+ "bookrealincome",
+ "bookunrealincome",
+ "totalbookpl",
+ ]
+ monthend_pnl = df_pnl.groupby(pd.Grouper(freq="M"), group_keys=False).apply(
+ lambda df: df.loc[df.index[-1]]
+ )
+ return monthend_pnl.groupby(["date"] + group_by)[
+ ["mtd" + col for col in pnl_cols]
+ ].sum()
def get_portfolio(report_date=None):
if report_date is not None:
sql_string = "SELECT * FROM valuation_reports where periodenddate = %s"
- df = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['periodenddate'],
- index_col=['periodenddate'], params=[report_date,])
+ df = pd.read_sql_query(
+ sql_string,
+ dawn_engine,
+ parse_dates=["periodenddate"],
+ index_col=["periodenddate"],
+ params=[report_date],
+ )
else:
sql_string = "SELECT * FROM valuation_reports"
- df = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['periodenddate'],
- index_col=['periodenddate'])
- df['identifier'] = df.invid.str.replace("_A$", "")
+ df = pd.read_sql_query(
+ sql_string,
+ dawn_engine,
+ parse_dates=["periodenddate"],
+ index_col=["periodenddate"],
+ )
+ df["identifier"] = df.invid.str.replace("_A$", "")
return df
def trade_performance():
sql_string = "SELECT * FROM bonds"
- df_trades = pd.read_sql_query(sql_string, dawn_engine,
- parse_dates={'lastupdate': {'utc': True},
- 'trade_date': {},
- 'settle_date': {}})
- df_trades = df_trades[df_trades['asset_class'] == 'Subprime']
+ df_trades = pd.read_sql_query(
+ sql_string,
+ dawn_engine,
+ parse_dates={"lastupdate": {"utc": True}, "trade_date": {}, "settle_date": {}},
+ )
+ df_trades = df_trades[df_trades["asset_class"] == "Subprime"]
df_pnl = get_monthly_pnl()
- df_sell = df_trades[~df_trades.buysell].groupby('identifier').last().reset_index()
+ df_sell = df_trades[~df_trades.buysell].groupby("identifier").last().reset_index()
df_sell.identifier = df_sell.identifier.str[:9]
- df_sell['trade_pnl_date'] = df_sell.trade_date + MonthEnd(0)
- df_buy = df_trades[df_trades.buysell].groupby('identifier').last().reset_index()
+ df_sell["trade_pnl_date"] = df_sell.trade_date + MonthEnd(0)
+ df_buy = df_trades[df_trades.buysell].groupby("identifier").last().reset_index()
- df_all = df_sell.merge(df_pnl.groupby('identifier').sum().reset_index(), on=['identifier'])
- df_all = df_all.merge(df_pnl.reset_index()[['date', 'identifier', 'mtdtotalbookpl']],
- left_on=['trade_pnl_date', 'identifier'],
- right_on=['date', 'identifier'],
- suffixes=('', '_at_trade_month'))
- df_all = df_all.drop(['date', 'trade_pnl_date'], axis=1)
+ df_all = df_sell.merge(
+ df_pnl.groupby("identifier").sum().reset_index(), on=["identifier"]
+ )
+ df_all = df_all.merge(
+ df_pnl.reset_index()[["date", "identifier", "mtdtotalbookpl"]],
+ left_on=["trade_pnl_date", "identifier"],
+ right_on=["date", "identifier"],
+ suffixes=("", "_at_trade_month"),
+ )
+ df_all = df_all.drop(["date", "trade_pnl_date"], axis=1)
- #now build up the table
- g = df_buy.groupby('identifier').sum()
+ # now build up the table
+ g = df_buy.groupby("identifier").sum()
init_inv = g.principal_payment + g.accrued_payment
- init_inv.name = 'initialinvestment'
+ init_inv.name = "initialinvestment"
- first_buy_date = df_buy.groupby('identifier').first().trade_date
- first_buy_date.name = 'firstbuydate'
+ first_buy_date = df_buy.groupby("identifier").first().trade_date
+ first_buy_date.name = "firstbuydate"
- df_all = df_all.join(init_inv, on='identifier')
- df_all = df_all.join(first_buy_date, on='identifier')
- df_all['percent_gain'] = df_all.mtdtotalbookpl / df_all.initialinvestment
- df_all['days_held'] = df_all.trade_date - df_all.firstbuydate
+ df_all = df_all.join(init_inv, on="identifier")
+ df_all = df_all.join(first_buy_date, on="identifier")
+ df_all["percent_gain"] = df_all.mtdtotalbookpl / df_all.initialinvestment
+ df_all["days_held"] = df_all.trade_date - df_all.firstbuydate
- df_all = df_all.sort_values('trade_date', ascending=False)
+ df_all = df_all.sort_values("trade_date", ascending=False)
return df_all
def get_net_navs():
sql_string = "SELECT * FROM valuation_reports"
- df_val = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['periodenddate'])
- nav = df_val[df_val.fund == 'SERCGMAST'].groupby('periodenddate')['endbooknav'].sum()
- nav = nav.resample('M').last()
- df = pd.read_csv('/home/serenitas/edwin/Python/subscription_fee_data.csv',
- parse_dates=['date'],
- index_col =['date'])
- df.index = df.index.to_period('M').to_timestamp('M')
+ df_val = pd.read_sql_query(sql_string, dawn_engine, parse_dates=["periodenddate"])
+ nav = (
+ df_val[df_val.fund == "SERCGMAST"].groupby("periodenddate")["endbooknav"].sum()
+ )
+ nav = nav.resample("M").last()
+ df = pd.read_csv(
+ "/home/serenitas/edwin/Python/subscription_fee_data.csv",
+ parse_dates=["date"],
+ index_col=["date"],
+ )
+ df.index = df.index.to_period("M").to_timestamp("M")
df = df.join(nav)
- df['begbooknav'] = (df.endbooknav + df.net_flow).shift(1)
- df.at[('2013-01-31', 'begbooknav')] = 12500000
+ df["begbooknav"] = (df.endbooknav + df.net_flow).shift(1)
+ df.at[("2013-01-31", "begbooknav")] = 12500000
return df
def shift_cash(date, amount, df, strat):
nav = get_net_navs()
- df.loc[date, strat] = df.loc[date, strat] - amount/nav.loc[date].endbooknav
- df.loc[date,'Cash'] = df.loc[date, 'Cash'] + amount/nav.loc[date].endbooknav
+ df.loc[date, strat] = df.loc[date, strat] - amount / nav.loc[date].endbooknav
+ df.loc[date, "Cash"] = df.loc[date, "Cash"] + amount / nav.loc[date].endbooknav
return df
def calc_trade_performance_stats():
- df = trade_performance().set_index('trade_date')
+ df = trade_performance().set_index("trade_date")
df.days_held = df.days_held.dt.days
- df['winners'] = df.percent_gain > 0
- df['curr_face'] = df.principal_payment/(df.price/100)
+ df["winners"] = df.percent_gain > 0
+ df["curr_face"] = df.principal_payment / (df.price / 100)
- index = ['All', '2017', '2016', '2015', '2014', '2013']
+ index = ["All", "2017", "2016", "2015", "2014", "2013"]
results = pd.DataFrame(index=index)
win_per = df.winners.mean()
loss_per = 1 - win_per
- for x, df1 in df.groupby('winners'):
- for y, df2 in df1.groupby(pd.Grouper(freq='A')):
+ for x, df1 in df.groupby("winners"):
+ for y, df2 in df1.groupby(pd.Grouper(freq="A")):
y = y.date().year
- results.loc[y] = df2[df2.days_held.notnull()].mean()[['curr_face', 'initialinvestment', 'days_held']]
+ results.loc[y] = df2[df2.days_held.notnull()].mean()[
+ ["curr_face", "initialinvestment", "days_held"]
+ ]
- df[df.days_held.notnull()]['days_held'].groupby(pd.Grouper(freq='A')).mean()
+ df[df.days_held.notnull()]["days_held"].groupby(pd.Grouper(freq="A")).mean()
-def hist_pos(asset_class = 'rmbs', dm=False):
+def hist_pos(asset_class="rmbs", dm=False):
end_date = pd.datetime.today() - MonthEnd(1)
- dates = pd.date_range(datetime.date(2013,1,31), end_date, freq='M')
+ dates = pd.date_range(datetime.date(2013, 1, 31), end_date, freq="M")
calc_df = pd.DataFrame()
for d in dates:
- if asset_class == 'rmbs':
- if d.date() == datetime.date(2018,11,30):
- d_1 = datetime.date(2018,12,3)
- elif d.date() == datetime.date(2016,2,29):
- d_1 = datetime.date(2016,2,29)
+ if asset_class == "rmbs":
+ if d.date() == datetime.date(2018, 11, 30):
+ d_1 = datetime.date(2018, 12, 3)
+ elif d.date() == datetime.date(2016, 2, 29):
+ d_1 = datetime.date(2016, 2, 29)
else:
d_1 = None
calc_df = calc_df.append(rmbs_pos(d, d_1, dm))
@@ -133,92 +163,118 @@ def hist_pos(asset_class = 'rmbs', dm=False):
calc_df = calc_df.append(clo_pos(d), sort=True)
return calc_df
+
def rmbs_pos(date, model_date=None, dm=False):
date = date.date() if isinstance(date, pd.Timestamp) else date
- #just non-zero factor bonds for now, need to incorporate that
+ # just non-zero factor bonds for now, need to incorporate that
pos = get_portfolio(date)
- pos = pos[(pos.port == 'MORTGAGES') &
- (pos.endbookmv > 0) &
- (pos.custacctname == 'V0NSCLMAMB') &
- (pos['invid'].str.len() == 9)]
- pos = pos[['endbookmv', 'endlocalmarketprice', 'identifier']]
+ pos = pos[
+ (pos.port == "MORTGAGES")
+ & (pos.endbookmv > 0)
+ & (pos.custacctname == "V0NSCLMAMB")
+ & (pos["invid"].str.len() == 9)
+ ]
+ pos = pos[["endbookmv", "endlocalmarketprice", "identifier"]]
- sql_string = ("SELECT distinct timestamp FROM priced where "
- "normalization = 'current_notional' and "
- "model_version = 1 and "
- "date(timestamp) < %s and date(timestamp) > %s "
- "order by timestamp desc")
- timestamps = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['timestamp'],
- params=[date, date - DateOffset(15, 'D')])
+ sql_string = (
+ "SELECT distinct timestamp FROM priced where "
+ "normalization = 'current_notional' and "
+ "model_version = 1 and "
+ "date(timestamp) < %s and date(timestamp) > %s "
+ "order by timestamp desc"
+ )
+ timestamps = pd.read_sql_query(
+ sql_string,
+ dawn_engine,
+ parse_dates=["timestamp"],
+ params=[date, date - DateOffset(15, "D")],
+ )
if model_date is None:
model_date = (timestamps.loc[0][0]).date()
yc = YieldTermStructure()
yc.link_to(YC(evaluation_date=model_date))
- sql_string = ("SELECT date(timestamp) as timestamp, cusip, model_version, "
- "pv, moddur, delta_yield, delta_ir "
- "FROM priced where date(timestamp) = %s "
- "and model_version <> 2")
+ sql_string = (
+ "SELECT date(timestamp) as timestamp, cusip, model_version, "
+ "pv, moddur, delta_yield, delta_ir "
+ "FROM priced where date(timestamp) = %s "
+ "and model_version <> 2"
+ )
params_list = (model_date,)
if date > datetime.date(2017, 9, 30):
- r = dawn_engine.execute("SELECT latest_sim FROM latest_sim(%s)",
- model_date)
+ r = dawn_engine.execute("SELECT latest_sim FROM latest_sim(%s)", model_date)
model_id, = next(r)
sql_string += " AND model_id_sub = %s"
params_list += (model_id,)
- model = pd.read_sql_query(sql_string, dawn_engine, parse_dates=['timestamp'],
- params=params_list)
- model = model[model['pv'] != 0]
- pos = pos.assign(curr_ntl = pos.endbookmv/pos.endlocalmarketprice *100)
- comb_g = pos.groupby('identifier').agg({'endbookmv': 'sum',
- 'curr_ntl': 'sum'})
- comb_g['date'] = pd.to_datetime(date)
- model = pd.merge(comb_g, model, left_on='identifier', right_on='cusip')
- positions = model.set_index(['cusip', 'model_version']).unstack(1).dropna()
- v1 = positions.xs(1, level='model_version', axis=1)
- v3 = positions.xs(3, level='model_version', axis=1)
+ model = pd.read_sql_query(
+ sql_string, dawn_engine, parse_dates=["timestamp"], params=params_list
+ )
+ model = model[model["pv"] != 0]
+ pos = pos.assign(curr_ntl=pos.endbookmv / pos.endlocalmarketprice * 100)
+ comb_g = pos.groupby("identifier").agg({"endbookmv": "sum", "curr_ntl": "sum"})
+ comb_g["date"] = pd.to_datetime(date)
+ model = pd.merge(comb_g, model, left_on="identifier", right_on="cusip")
+ positions = model.set_index(["cusip", "model_version"]).unstack(1).dropna()
+ v1 = positions.xs(1, level="model_version", axis=1)
+ v3 = positions.xs(3, level="model_version", axis=1)
if dm is True:
libor1m = USDLibor(Period(1, Months), yc)
- libor = libor1m.fixing(libor1m.fixing_calendar.adjust(Date.from_datetime(model_date)))
- v3 = v3.assign(b_yield = v3.moddur.apply(lambda x: float(yc.zero_rate(x))) -
- libor)
+ libor = libor1m.fixing(
+ libor1m.fixing_calendar.adjust(Date.from_datetime(model_date))
+ )
+ v3 = v3.assign(
+ b_yield=v3.moddur.apply(lambda x: float(yc.zero_rate(x))) - libor
+ )
else:
- v3 = v3.assign(b_yield = v3.moddur.apply(lambda x: float(yc.zero_rate(x))))
- v3.b_yield += np.minimum((v1.pv * v1.curr_ntl/ v1.endbookmv)
- ** (1/v1.moddur) - 1, 1).dropna()
+ v3 = v3.assign(b_yield=v3.moddur.apply(lambda x: float(yc.zero_rate(x))))
+ v3.b_yield += np.minimum(
+ (v1.pv * v1.curr_ntl / v1.endbookmv) ** (1 / v1.moddur) - 1, 1
+ ).dropna()
v3.delta_yield *= v3.endbookmv / v3.pv
- v3.delta_ir *= np.minimum(1, 1/v3.moddur) * \
- (v3.endbookmv/v3.curr_ntl)/ v3.pv * v3.curr_ntl
- return v3.reset_index().set_index('date')
+ v3.delta_ir *= (
+ np.minimum(1, 1 / v3.moddur)
+ * (v3.endbookmv / v3.curr_ntl)
+ / v3.pv
+ * v3.curr_ntl
+ )
+ return v3.reset_index().set_index("date")
+
def clo_pos(date):
date = date.date() if isinstance(date, pd.Timestamp) else date
df = get_portfolio(date)
- df = df[(df.port == 'CLO') &
- (df.endbookmv > 0) &
- (df.custacctname == 'V0NSCLMAMB') &
- (df['invid'].str.len() >= 9)]
- df = df[['endbookmv', 'endlocalmarketprice', 'identifier']]
+ df = df[
+ (df.port == "CLO")
+ & (df.endbookmv > 0)
+ & (df.custacctname == "V0NSCLMAMB")
+ & (df["invid"].str.len() >= 9)
+ ]
+ df = df[["endbookmv", "endlocalmarketprice", "identifier"]]
if df.empty is True:
return df
else:
- sql_string = "select distinct cusip, identifier from bonds where asset_class = 'CLO'"
- cusip_map = {identifier: cusip for cusip, identifier in dawn_engine.execute(sql_string)}
- df['cusip'] = df['identifier'].replace(cusip_map)
- cusips = df.loc[[df.index[-1]], 'cusip']
+ sql_string = (
+ "select distinct cusip, identifier from bonds where asset_class = 'CLO'"
+ )
+ cusip_map = {
+ identifier: cusip for cusip, identifier in dawn_engine.execute(sql_string)
+ }
+ df["cusip"] = df["identifier"].replace(cusip_map)
+ cusips = df.loc[[df.index[-1]], "cusip"]
placeholders = ",".join(["%s"] * (1 + len(cusips)))
sql_string = f"SELECT * FROM historical_cusip_risk({placeholders})"
- etengine = dbengine('etdb')
- model = pd.read_sql_query(sql_string, etengine, parse_dates=['pricingdate'],
- params=(date, *cusips))
+ etengine = dbengine("etdb")
+ model = pd.read_sql_query(
+ sql_string, etengine, parse_dates=["pricingdate"], params=(date, *cusips)
+ )
model.index = cusips
- calc_df = df.loc[[df.index[-1]]].set_index('cusip').join(model)
- calc_df['hy_equiv'] = calc_df.delta * calc_df.endbookmv
- calc_df['date'] = date
- return calc_df.set_index('date')
+ calc_df = df.loc[[df.index[-1]]].set_index("cusip").join(model)
+ calc_df["hy_equiv"] = calc_df.delta * calc_df.endbookmv
+ calc_df["date"] = date
+ return calc_df.set_index("date")
diff --git a/python/gmail_helpers.py b/python/gmail_helpers.py
index 37cf9cbe..7b47ce10 100644
--- a/python/gmail_helpers.py
+++ b/python/gmail_helpers.py
@@ -11,9 +11,10 @@ import json
import oauth2client
import os
-SCOPES = ['https://www.googleapis.com/auth/gmail.modify']
-CLIENT_SECRET_FILE = 'secret.json'
-APPLICATION_NAME = 'Swaptions'
+SCOPES = ["https://www.googleapis.com/auth/gmail.modify"]
+CLIENT_SECRET_FILE = "secret.json"
+APPLICATION_NAME = "Swaptions"
+
def get_gmail_service():
"""Gets valid user credentials from storage.
@@ -24,25 +25,35 @@ def get_gmail_service():
Returns:
Credentials, the obtained credential.
"""
- credential_dir = '.credentials'
+ credential_dir = ".credentials"
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
- credential_path = os.path.join(credential_dir,
- 'guillaume.horel@serenitascapital.com.json')
+ credential_path = os.path.join(
+ credential_dir, "guillaume.horel@serenitascapital.com.json"
+ )
try:
credentials = Credentials.from_authorized_user_file(credential_path)
except:
flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
credentials = flow.run_console()
- to_save= {}
- for attr in ["token", "refresh_token", "id_token", "token_uri", "client_id", "client_secret", "scopes"]:
+ to_save = {}
+ for attr in [
+ "token",
+ "refresh_token",
+ "id_token",
+ "token_uri",
+ "client_id",
+ "client_secret",
+ "scopes",
+ ]:
to_save[attr] = getattr(credentials, attr)
with open(credential_path, "w") as fh:
json.dump(to_save, fh)
- service = build('gmail', 'v1', credentials=credentials)
+ service = build("gmail", "v1", credentials=credentials)
return service
+
def ListMessagesWithLabels(service, user_id, label_ids=[]):
"""List all Messages of the user's mailbox with label_ids applied.
@@ -58,19 +69,26 @@ def ListMessagesWithLabels(service, user_id, label_ids=[]):
appropriate id to get the details of a Message.
"""
try:
- response = service.users().messages().list(userId=user_id,
- labelIds=label_ids).execute()
- if 'messages' in response:
- yield from response['messages']
- while 'nextPageToken' in response:
- page_token = response['nextPageToken']
- response = service.users().messages().list(userId=user_id,
- labelIds=label_ids,
- pageToken=page_token).execute()
- yield from response['messages']
+ response = (
+ service.users()
+ .messages()
+ .list(userId=user_id, labelIds=label_ids)
+ .execute()
+ )
+ if "messages" in response:
+ yield from response["messages"]
+ while "nextPageToken" in response:
+ page_token = response["nextPageToken"]
+ response = (
+ service.users()
+ .messages()
+ .list(userId=user_id, labelIds=label_ids, pageToken=page_token)
+ .execute()
+ )
+ yield from response["messages"]
except errors.HttpError as error:
- print(json.loads(error.content.decode('utf-8'))['error']['message'])
+ print(json.loads(error.content.decode("utf-8"))["error"]["message"])
def ListHistory(service, user_id, label_id=None, start_history_id=10000):
@@ -85,26 +103,36 @@ def ListHistory(service, user_id, label_id=None, start_history_id=10000):
Returns:
A list of mailbox changes that occurred after the start_history_id.
"""
- history = (service.users().history().list(userId=user_id,
- startHistoryId=start_history_id,
- historyTypes="messageAdded",
- labelId=label_id)
- .execute())
- changes = history['history'] if 'history' in history else []
+ history = (
+ service.users()
+ .history()
+ .list(
+ userId=user_id,
+ startHistoryId=start_history_id,
+ historyTypes="messageAdded",
+ labelId=label_id,
+ )
+ .execute()
+ )
+ changes = history["history"] if "history" in history else []
for change in changes:
- if 'messagesAdded' in change:
- for c in change['messagesAdded']:
- yield c['message']
+ if "messagesAdded" in change:
+ for c in change["messagesAdded"]:
+ yield c["message"]
+
+ while "nextPageToken" in history:
+ page_token = history["nextPageToken"]
+ history = (
+ service.users()
+ .history()
+ .list(userId=user_id, startHistoryId=start_history_id, pageToken=page_token)
+ .execute()
+ )
+ for change in history["history"]:
+ if "messagesAdded" in change:
+ for c in change["messagesAdded"]:
+ yield c["message"]
- while 'nextPageToken' in history:
- page_token = history['nextPageToken']
- history = (service.users().history().list(userId=user_id,
- startHistoryId=start_history_id,
- pageToken=page_token).execute())
- for change in history['history']:
- if 'messagesAdded' in change:
- for c in change['messagesAdded']:
- yield c['message']
def labels_dict(service, user_id):
"""Returns a dictionary mapping labels to labelids.
@@ -118,10 +146,11 @@ def labels_dict(service, user_id):
"""
try:
response = service.users().labels().list(userId=user_id).execute()
- labels = response['labels']
- return {label['name']: label['id'] for label in labels}
+ labels = response["labels"]
+ return {label["name"]: label["id"] for label in labels}
except errors.HttpError as error:
- print(json.loads(error.content.decode('utf-8'))['error']['message'])
+ print(json.loads(error.content.decode("utf-8"))["error"]["message"])
+
class GmailMessage(EmailMessage):
_service = None
@@ -132,44 +161,55 @@ class GmailMessage(EmailMessage):
if GmailMessage._service is None:
GmailMessage._service = get_gmail_service()
if GmailMessage._labels is None:
- GmailMessage._labels = labels_dict(self._service, 'me')
+ GmailMessage._labels = labels_dict(self._service, "me")
def msgdict(self):
- return {'raw': base64.urlsafe_b64encode(self.as_bytes()).decode()}
+ return {"raw": base64.urlsafe_b64encode(self.as_bytes()).decode()}
def send(self):
try:
- message = (self._service.users().messages().
- send(userId='me',body=self.msgdict())
- .execute())
- print('Message Id: %s' % message['id'])
+ message = (
+ self._service.users()
+ .messages()
+ .send(userId="me", body=self.msgdict())
+ .execute()
+ )
+ print("Message Id: %s" % message["id"])
except errors.HttpError as error:
- print('An error occurred: %s' % error)
+ print("An error occurred: %s" % error)
@classmethod
def list_msg_ids(cls, label, start_history_id=None):
if start_history_id is not None:
- return ListHistory(cls._service,
- 'me',
- label_id=cls._labels[label],
- start_history_id=start_history_id)
+ return ListHistory(
+ cls._service,
+ "me",
+ label_id=cls._labels[label],
+ start_history_id=start_history_id,
+ )
else:
- return ListMessagesWithLabels(cls._service,
- 'me',
- label_ids=[cls._labels[label]])
+ return ListMessagesWithLabels(
+ cls._service, "me", label_ids=[cls._labels[label]]
+ )
@classmethod
- def from_id(cls, msg_id, user_id='me'):
+ def from_id(cls, msg_id, user_id="me"):
try:
- message = (cls._service.users().messages().
- get(userId=user_id, id=msg_id, format='raw').execute())
+ message = (
+ cls._service.users()
+ .messages()
+ .get(userId=user_id, id=msg_id, format="raw")
+ .execute()
+ )
instance = email.message_from_bytes(
- base64.urlsafe_b64decode(message['raw']),
- policy=email.policy.EmailPolicy())
- instance.history_id = message['historyId']
+ base64.urlsafe_b64decode(message["raw"]),
+ policy=email.policy.EmailPolicy(),
+ )
+ instance.history_id = message["historyId"]
return instance
except errors.HttpError as error:
- print(json.loads(error.content.decode('utf-8'))['error']['message'])
+ print(json.loads(error.content.decode("utf-8"))["error"]["message"])
+
def main():
"""Shows basic usage of the Gmail API.
@@ -179,9 +219,10 @@ def main():
"""
message = GmailMessage()
message.set_content("Hello everyone!")
- message['To'] = 'guillaume.horel@gmail.com'
- message['Subject'] = 'pomme'
+ message["To"] = "guillaume.horel@gmail.com"
+ message["Subject"] = "pomme"
message.send()
-if __name__ == '__main__':
+
+if __name__ == "__main__":
message = main()
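
A minimal, self-contained sketch of the pageToken/nextPageToken pagination idiom that ListMessagesWithLabels and ListHistory implement above. The paginate helper and its arguments are hypothetical (not part of this module); only the Gmail API list()/execute() calls and the nextPageToken field are taken from the code in this diff.

    def paginate(list_call, key, **kwargs):
        # Yield items from a Gmail-style list endpoint, following nextPageToken.
        response = list_call(**kwargs).execute()
        yield from response.get(key, [])
        while "nextPageToken" in response:
            response = list_call(pageToken=response["nextPageToken"], **kwargs).execute()
            yield from response.get(key, [])

    # hypothetical usage, roughly equivalent to ListMessagesWithLabels(service, "me", label_ids):
    # for msg in paginate(service.users().messages().list, "messages",
    #                     userId="me", labelIds=label_ids):
    #     print(msg["id"])
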
diff --git a/python/handle_default.py b/python/handle_default.py
index 426df961..85eb852b 100644
--- a/python/handle_default.py
+++ b/python/handle_default.py
@@ -2,26 +2,33 @@ from utils.db import serenitas_pool
import datetime
from sys import argv
+
def get_recovery(company_id: int, seniority: str, conn):
with conn.cursor() as c:
- c.execute("SELECT recovery * 100, auction_date FROM defaulted "
- "WHERE id=%s AND seniority=%s",
- (company_id, seniority))
+ c.execute(
+ "SELECT recovery * 100, auction_date FROM defaulted "
+ "WHERE id=%s AND seniority=%s",
+ (company_id, seniority),
+ )
return c.fetchone()
+
def affected_indices(company_id: int, seniority: str, conn):
"""returns the list of indices containing company_id"""
- sqlstr = ("SELECT b.*, a.curr_weight*b.indexfactor AS orig_weight "
- "FROM basket_constituents_current a "
- "JOIN (SELECT * FROM index_version WHERE lastdate='infinity') b "
- "USING (basketid) "
- "WHERE company_id=%s AND seniority=%s")
+ sqlstr = (
+ "SELECT b.*, a.curr_weight*b.indexfactor AS orig_weight "
+ "FROM basket_constituents_current a "
+ "JOIN (SELECT * FROM index_version WHERE lastdate='infinity') b "
+ "USING (basketid) "
+ "WHERE company_id=%s AND seniority=%s"
+ )
print(sqlstr)
with conn.cursor() as c:
c.execute(sqlstr, (company_id, seniority))
recordslist = c.fetchall()
return recordslist
+
def create_newindices(recordslist, recovery, lastdate, conn):
"""create the new indices versions and update the old"""
insertstr = """INSERT INTO index_version(Index, Series, Version, IndexFactor,
@@ -33,36 +40,40 @@ def create_newindices(recordslist, recovery, lastdate, conn):
with conn.cursor() as c:
newids = {}
for r in recordslist:
- r['indexfactor'] -= r['orig_weight']
- r['version'] += 1
- r['cumulativeloss'] += (100-recovery) * r['orig_weight'] /100
- r['lastdate'] = 'infinity'
+ r["indexfactor"] -= r["orig_weight"]
+ r["version"] += 1
+ r["cumulativeloss"] += (100 - recovery) * r["orig_weight"] / 100
+ r["lastdate"] = "infinity"
c.execute(insertstr, r)
- newids[r['basketid']] = c.fetchone()[0]
+ newids[r["basketid"]] = c.fetchone()[0]
for oldid in newids.keys():
c.execute(updatestr, (lastdate, oldid))
conn.commit()
return newids
+
def update_indexmembers(newids, company_id, seniority, conn):
with conn.cursor() as c:
for oldid, newid in newids.items():
- c.execute("INSERT INTO basket_constituents "
- "(SELECT company_id, seniority, %s, weight "
- "FROM basket_constituents "
- "WHERE basketid=%s AND NOT (company_id=%s AND seniority=%s))",
- (newid, oldid, company_id, seniority))
+ c.execute(
+ "INSERT INTO basket_constituents "
+ "(SELECT company_id, seniority, %s, weight "
+ "FROM basket_constituents "
+ "WHERE basketid=%s AND NOT (company_id=%s AND seniority=%s))",
+ (newid, oldid, company_id, seniority),
+ )
conn.commit()
+
def update_redcodes(index_type, conn):
- if index_type == 'HY':
- index_subfamily = 'CDX.NA.HY'
- elif index_type == 'IG':
- index_subfamily = 'CDX.NA.IG'
- elif index_type == 'EU':
- index_subfamily = 'iTraxx Europe'
- elif index_type == 'XO':
- index_subfamily = 'iTraxx Europe Crossover'
+ if index_type == "HY":
+ index_subfamily = "CDX.NA.HY"
+ elif index_type == "IG":
+ index_subfamily = "CDX.NA.IG"
+ elif index_type == "EU":
+ index_subfamily = "iTraxx Europe"
+ elif index_type == "XO":
+ index_subfamily = "iTraxx Europe Crossover"
with conn.cursor() as c:
c.execute(
@@ -72,16 +83,19 @@ def update_redcodes(index_type, conn):
"AND index_version.index=%s "
"AND index_version_markit.indexsubfamily=%s"
"AND index_version.redindexcode IS NULL",
- (index_type, index_subfamily))
+ (index_type, index_subfamily),
+ )
conn.commit()
-if __name__=="__main__":
+if __name__ == "__main__":
if len(argv) == 1:
- print("""Usage:
+ print(
+ """Usage:
python handle_default.py <company_id> <seniority>
For instance:
- python handle_default.py 210065 Senior""")
+ python handle_default.py 210065 Senior"""
+ )
else:
conn = serenitas_pool.getconn()
company_id = int(argv[1])
@@ -90,4 +104,4 @@ For instance:
recordslist = affected_indices(company_id, seniority, conn)
newids = create_newindices(recordslist, recovery, lastdate, conn)
update_indexmembers(newids, company_id, seniority, conn)
- serenitas_pool.putconn(conn,)
+ serenitas_pool.putconn(conn)
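
A quick numeric check of the index-factor arithmetic applied in create_newindices above (all numbers are made up for illustration; recovery is in percent, as returned by get_recovery):

    # hypothetical constituent leaving an index after a credit event
    orig_weight = 0.01        # curr_weight * indexfactor
    indexfactor = 0.98
    cumulativeloss = 1.50
    recovery = 26.0           # percent

    indexfactor -= orig_weight                                 # 0.97
    cumulativeloss += (100 - recovery) * orig_weight / 100     # 1.50 + 0.0074 = 1.5074
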
diff --git a/python/load_globeop_report.py b/python/load_globeop_report.py
index ec305a90..93d8e889 100644
--- a/python/load_globeop_report.py
+++ b/python/load_globeop_report.py
@@ -6,32 +6,38 @@ from dates import bus_day
from utils.db import dbengine
import datetime
-def get_globs(fname, years=['2013', '2014', '2015', '2016', '2017']):
- basedir = '/home/serenitas/Daily'
- globs = [iglob(os.path.join(basedir,
- year,
- "{0}_*/{0}*/Reports/{1}.csv".format(year, fname)))
- for year in years]
+
+def get_globs(fname, years=["2013", "2014", "2015", "2016", "2017"]):
+ basedir = "/home/serenitas/Daily"
+ globs = [
+ iglob(
+ os.path.join(
+ basedir, year, "{0}_*/{0}*/Reports/{1}.csv".format(year, fname)
+ )
+ )
+ for year in years
+ ]
for year in years[-2:]:
- globs.append(iglob(os.path.join(basedir,
- '{0}-*/Reports/{1}.csv'.format(year,
- fname))))
+ globs.append(
+ iglob(os.path.join(basedir, "{0}-*/Reports/{1}.csv".format(year, fname)))
+ )
return globs
+
def read_valuation_report(f):
date = pd.Timestamp(f.parts[4])
- if date >= pd.Timestamp('2013-02-06'):
- df = pd.read_csv(f, parse_dates=['KnowledgeDate', 'PeriodEndDate'])
+ if date >= pd.Timestamp("2013-02-06"):
+ df = pd.read_csv(f, parse_dates=["KnowledgeDate", "PeriodEndDate"])
else:
df = pd.read_csv(f)
- df['KnowledgeDate'] = date
- df['PeriodEndDate'] = date - bus_day
- df['row'] = df.index
- if 'AccountingPeriod' in df:
- del df['AccountingPeriod']
- if 'CounterParty' in df:
- del df['CounterParty']
- df = df.rename(columns={'CounterPartyCode': 'counterparty'})
+ df["KnowledgeDate"] = date
+ df["PeriodEndDate"] = date - bus_day
+ df["row"] = df.index
+ if "AccountingPeriod" in df:
+ del df["AccountingPeriod"]
+ if "CounterParty" in df:
+ del df["CounterParty"]
+ df = df.rename(columns={"CounterPartyCode": "counterparty"})
if "Strat" in df:
df.Strat = df.Strat.str.replace("^(SERCGMAST__){1,2}(M_|SER_)?", "", 1)
df = df.replace({"Strat": {"TCDSCSH": "TCSH", "MTG_CRT_LD": "CRT_LD"}})
@@ -40,129 +46,181 @@ def read_valuation_report(f):
df.columns = df.columns.str.lower()
return df
+
def valuation_reports():
- df = pd.concat(read_valuation_report(f) for f in
- chain.from_iterable(get_globs('Valuation_Report')))
+ df = pd.concat(
+ read_valuation_report(f)
+ for f in chain.from_iterable(get_globs("Valuation_Report"))
+ )
# There can be duplicates in case of holidays
- df = df.sort_values(['periodenddate', 'row', 'knowledgedate'])
- df = df.drop_duplicates(['periodenddate', 'row'], 'last')
- df.to_sql('valuation_reports', dbengine('dawndb'), if_exists='append', index=False)
+ df = df.sort_values(["periodenddate", "row", "knowledgedate"])
+ df = df.drop_duplicates(["periodenddate", "row"], "last")
+ df.to_sql("valuation_reports", dbengine("dawndb"), if_exists="append", index=False)
+
def read_pnl_report(f):
df = pd.read_csv(f)
df.Strat = df.Strat.str.replace("^(SERCGMAST__){1,2}(M_|SER_)?", "", 1)
df = df.replace({"Strat": {"TCDSCSH": "TCSH", "MTG_CRT_LD": "CRT_LD"}})
df.Port = df.Port.str.replace("^(SERCGMAST__){1,2}(SERG__|SERG_)?", "", 1)
- df['LongShortIndicator'] = df['LongShortIndicator'].str.strip()
+ df["LongShortIndicator"] = df["LongShortIndicator"].str.strip()
df.columns = df.columns.str.lower().str.replace(" ", "")
return df
+
def pnl_reports():
df = {}
- for f in chain.from_iterable(get_globs('Pnl*')):
+ for f in chain.from_iterable(get_globs("Pnl*")):
        if not (f.endswith("Pnl.csv") or f.endswith("Pnl_Report.csv")):
continue
- date = pd.Timestamp(f.rsplit('/', 3)[1])
+ date = pd.Timestamp(f.rsplit("/", 3)[1])
date = date - bus_day
df[date] = read_pnl_report(f)
- df = pd.concat(df, names=['date', 'row']).reset_index()
- df.to_sql('pnl_reports', dbengine('dawndb'), if_exists='append', index=False)
+ df = pd.concat(df, names=["date", "row"]).reset_index()
+ df.to_sql("pnl_reports", dbengine("dawndb"), if_exists="append", index=False)
+
def read_cds_report(f):
df = pd.read_csv(f)
df2 = pd.read_csv(f.parent / "All_Report.csv")
+
def drop_zero_count(df):
for col in df:
vc = len(df[col].value_counts())
if vc == 0:
del df[col]
continue
+
drop_zero_count(df)
drop_zero_count(df2)
- contract = df['Contractual Definition']
- contract = contract.where(contract.isin(['ISDA2014', 'ISDA2003Cred']), 'ISDA2014')
- df['Contractual Definition'] = contract
- to_drop = ['Bloomberg Yellow key', 'Created User', 'Last Modified User',
- 'Last Modified Date', 'Fund Long Name', 'Instrument Sub Type',
- 'Netting Id', 'Client', 'Trade Status', 'Position Status',
- 'Clearing Broker', 'Settle Mode', 'Off Price', 'On Price',
- 'Price Ccy', 'VAT', 'SEC Fee', 'Clearing Fee',
- 'Trading Notional', 'BBGID']
- df = df.drop(to_drop,
- axis=1, errors='ignore')
- df2 = df2.drop(to_drop,
- axis=1, errors='ignore')
+ contract = df["Contractual Definition"]
+ contract = contract.where(contract.isin(["ISDA2014", "ISDA2003Cred"]), "ISDA2014")
+ df["Contractual Definition"] = contract
+ to_drop = [
+ "Bloomberg Yellow key",
+ "Created User",
+ "Last Modified User",
+ "Last Modified Date",
+ "Fund Long Name",
+ "Instrument Sub Type",
+ "Netting Id",
+ "Client",
+ "Trade Status",
+ "Position Status",
+ "Clearing Broker",
+ "Settle Mode",
+ "Off Price",
+ "On Price",
+ "Price Ccy",
+ "VAT",
+ "SEC Fee",
+ "Clearing Fee",
+ "Trading Notional",
+ "BBGID",
+ ]
+ df = df.drop(to_drop, axis=1, errors="ignore")
+ df2 = df2.drop(to_drop, axis=1, errors="ignore")
df.columns = df.columns.str.lower().str.replace(" ", "_")
df2.columns = df2.columns.str.lower().str.replace(" ", "_")
df.calendar = df.calendar.str.replace(" ", "")
- df = df.rename(columns={'direction': 'buy/sell'})
+ df = df.rename(columns={"direction": "buy/sell"})
df.roll_convention = df.roll_convention.str.title()
- df = df[df.strategy != 'SER_TEST']
- df.loc[df.strategy == 'SERCGMAST__MBSCDS', 'strategy'] = 'MBSCDS'
+ df = df[df.strategy != "SER_TEST"]
+ df.loc[df.strategy == "SERCGMAST__MBSCDS", "strategy"] = "MBSCDS"
df.strategy = df.strategy.str.replace("SER_", "")
- df['buy/sell'] = df['buy/sell'].astype('category')
- df['buy/sell'].cat.categories = ['Buyer', 'Seller']
- del df['independent_%']
- df2 = df2.rename(columns={'independent_%': 'independent_perc'})
- df.prime_broker = df.prime_broker.where(df.prime_broker != 'NONE')
- return df.set_index('gtid').join(df2.set_index('gtid')[
- df2.columns.difference(df.columns)]).reset_index()
+ df["buy/sell"] = df["buy/sell"].astype("category")
+ df["buy/sell"].cat.categories = ["Buyer", "Seller"]
+ del df["independent_%"]
+ df2 = df2.rename(columns={"independent_%": "independent_perc"})
+ df.prime_broker = df.prime_broker.where(df.prime_broker != "NONE")
+ return (
+ df.set_index("gtid")
+ .join(df2.set_index("gtid")[df2.columns.difference(df.columns)])
+ .reset_index()
+ )
+
def read_swaption_report(f):
df = pd.read_csv(f)
df2 = pd.read_csv(f.parent / "All_Report.csv")
+
def drop_zero_count(df):
for k, v in df.iteritems():
if len(v.value_counts()) == 0:
del df[k]
+
drop_zero_count(df)
drop_zero_count(df2)
# df2 = df2[df2["Product Sub Type"] == "CD_INDEX_OPTION"]
# df = df[df["Product Sub Type"] == "CD_INDEX_OPTION"]
df = df.set_index("GTID").join(df2.set_index("GTID")[["Geneva ID"]])
- for key in ['Created User', 'Last Modified User',
- 'Last Modified Date', 'Trade Status', 'Position Status',
- 'Client', 'External Trade ID', 'Fund', 'Fund Long Name',
- 'Prime Broker', 'Transaction Status', 'Created Date', 'Comments',
- 'Trade Type']:
+ for key in [
+ "Created User",
+ "Last Modified User",
+ "Last Modified Date",
+ "Trade Status",
+ "Position Status",
+ "Client",
+ "External Trade ID",
+ "Fund",
+ "Fund Long Name",
+ "Prime Broker",
+ "Transaction Status",
+ "Created Date",
+ "Comments",
+ "Trade Type",
+ ]:
del df[key]
for k, v in df.iteritems():
if "Date" in k and "End Date" not in k:
df[k] = pd.to_datetime(v)
return df
+
def cds_reports():
df = {}
- for f in chain.from_iterable(get_globs('CDS_Report')):
- date = pd.Timestamp(f.rsplit('/', 3)[1])
- old_report = date <= pd.Timestamp('2017-02-28') or date == pd.Timestamp('2017-03-02')
+ for f in chain.from_iterable(get_globs("CDS_Report")):
+ date = pd.Timestamp(f.rsplit("/", 3)[1])
+ old_report = date <= pd.Timestamp("2017-02-28") or date == pd.Timestamp(
+ "2017-03-02"
+ )
date = date - bus_day
df[date] = read_cds_report(f, old_report)
- df = pd.concat(df, names=['date', 'row']).reset_index()
+ df = pd.concat(df, names=["date", "row"]).reset_index()
return df
+
def monthly_pnl_bycusip(df, strats):
- df = df[(df.strat.isin(strats)) & (df.custacctname=='V0NSCLMAMB')]
- pnl_cols = ['bookunrealmtm', 'bookrealmtm', 'bookrealincome', 'bookunrealincome', 'totalbookpl']
- return df.groupby('invid').resample('M').last()[['mtd'+col for col in pnl_cols]]
+ df = df[(df.strat.isin(strats)) & (df.custacctname == "V0NSCLMAMB")]
+ pnl_cols = [
+ "bookunrealmtm",
+ "bookrealmtm",
+ "bookrealincome",
+ "bookunrealincome",
+ "totalbookpl",
+ ]
+ return df.groupby("invid").resample("M").last()[["mtd" + col for col in pnl_cols]]
+
-if __name__=='__main__':
+if __name__ == "__main__":
valuation_reports()
pnl_reports()
- df_val = pd.read_hdf('globeop.hdf', 'valuation_report')
- df_pnl = pd.read_hdf('globeop.hdf', 'pnl')
- nav = df_val[df_val.Fund == 'SERCGMAST'].groupby('PeriodEndDate')['EndBookNAV'].sum()
- subprime_strats = ['MTG_GOOD', 'MTG_RW', 'MTG_IO','MTG_THRU', 'MTG_B4PR']
- clo_strats = ['CLO_BBB', 'CLO_AAA', 'CLO_BB20']
+ df_val = pd.read_hdf("globeop.hdf", "valuation_report")
+ df_pnl = pd.read_hdf("globeop.hdf", "pnl")
+ nav = (
+ df_val[df_val.Fund == "SERCGMAST"].groupby("PeriodEndDate")["EndBookNAV"].sum()
+ )
+ subprime_strats = ["MTG_GOOD", "MTG_RW", "MTG_IO", "MTG_THRU", "MTG_B4PR"]
+ clo_strats = ["CLO_BBB", "CLO_AAA", "CLO_BB20"]
## daily pnl by cusip
- #subprime_daily_pnl = daily_pnl_bycusip(df_pnl, subprime_strats)
+ # subprime_daily_pnl = daily_pnl_bycusip(df_pnl, subprime_strats)
df_monthly = monthly_pnl_bycusip(df_pnl, subprime_strats)
- #df_monthly.loc[idx[ts('2015-01-01'):ts('2015-01-31'),:],:]
+ # df_monthly.loc[idx[ts('2015-01-01'):ts('2015-01-31'),:],:]
# clo = df_pnl[df_pnl.Strat.isin(clo_strats)]
# clo_monthly_pnl = clo.groupby(level=0).sum()['MTD TotalBookPL'].resample('M').last()
# clo.groupby(level=0).sum()['2015-12-01':'2015-12-31']
- df_val.set_index(['custacctname', 'periodenddate', 'invid', 'strat'])
+ df_val.set_index(["custacctname", "periodenddate", "invid", "strat"])
diff --git a/python/load_refentity.py b/python/load_refentity.py
index 7e6a9f6b..2d07bf62 100644
--- a/python/load_refentity.py
+++ b/python/load_refentity.py
@@ -3,9 +3,11 @@ from pathlib import Path
import datetime
import re
import lxml.etree as etree
+
parser = etree.XMLParser(remove_blank_text=True)
from psycopg2.extras import Json
+
def todict(xml, uselist=set()):
if len(xml):
if xml.tag in uselist:
@@ -16,7 +18,7 @@ def todict(xml, uselist=set()):
if c.tag in d:
d[c.tag].append(todict(c, uselist))
else:
- if c.tag in ['cdssuccession', 'creditevent', 'auctionccyrate']:
+ if c.tag in ["cdssuccession", "creditevent", "auctionccyrate"]:
d[c.tag] = [todict(c, uselist)]
else:
d[c.tag] = todict(c, uselist)
@@ -24,9 +26,10 @@ def todict(xml, uselist=set()):
else:
return xml.text.strip()
+
def dispatch_parsing(col, uselist):
if len(col):
- if col.tag == 'ratings':
+ if col.tag == "ratings":
return [el.text for el in col.iterfind(".//tier")]
else:
return Json(todict(col, uselist))
@@ -36,23 +39,51 @@ def dispatch_parsing(col, uselist):
else:
return col.text
+
def insert_refentity(fname):
- tree = etree.parse(fname,
- parser=parser)
- conn = dbconn('serenitasdb')
- names = ['referenceentity', 'shortname', 'ticker', 'redentitycode',
- 'entitycusip', 'lei', 'entitytype', 'jurisdiction', 'depthlevel',
- 'markitsector', 'isdatradingdefinition', 'recorddate', 'ratings',
- 'entityform', 'companynumber', 'alternativenames',
- 'isdatransactiontypes', 'validto', 'validfrom', 'events',
- 'holdco', 'country',]
+ tree = etree.parse(fname, parser=parser)
+ conn = dbconn("serenitasdb")
+ names = [
+ "referenceentity",
+ "shortname",
+ "ticker",
+ "redentitycode",
+ "entitycusip",
+ "lei",
+ "entitytype",
+ "jurisdiction",
+ "depthlevel",
+ "markitsector",
+ "isdatradingdefinition",
+ "recorddate",
+ "ratings",
+ "entityform",
+ "companynumber",
+ "alternativenames",
+ "isdatransactiontypes",
+ "validto",
+ "validfrom",
+ "events",
+ "holdco",
+ "country",
+ ]
# these are tags which enclose a list
- uselist = set(['events', 'isdatransactiontypes', 'nextredentitycodes', 'prevredentitycodes',
- 'isdatransactiontypes', 'tiers', 'auctions'])
- sql_str = ("""INSERT INTO RefEntity VALUES({})
- ON CONFLICT(redentitycode) DO UPDATE SET {}""".
- format(",".join(["%s"] * len(names)),
- ",".join(f"{name}=EXCLUDED.{name}" for name in names[1:])))
+ uselist = set(
+ [
+ "events",
+ "isdatransactiontypes",
+ "nextredentitycodes",
+ "prevredentitycodes",
+ "isdatransactiontypes",
+ "tiers",
+ "auctions",
+ ]
+ )
+ sql_str = """INSERT INTO RefEntity VALUES({})
+ ON CONFLICT(redentitycode) DO UPDATE SET {}""".format(
+ ",".join(["%s"] * len(names)),
+ ",".join(f"{name}=EXCLUDED.{name}" for name in names[1:]),
+ )
skipfirst = True
with conn.cursor() as c:
for child in tree.getroot():
@@ -63,36 +94,68 @@ def insert_refentity(fname):
c.execute(sql_str, [d.get(name) for name in names])
conn.commit()
+
def parse_prospectus(xml):
return Json({c.tag: [e.text for e in c] for c in xml})
+
def insert_refobligation(fname):
- tree = etree.parse(fname,
- parser=parser)
- conn = dbconn('serenitasdb')
- names_redpair = ['redpaircode', 'role', 'referenceentity', 'redentitycode',
- 'tier', 'pairiscurrent', 'pairvalidfrom', 'pairvalidto',
- 'ticker', 'ispreferred', 'preferreddate', 'indexconstituents',
- 'recorddate', 'publiccomments', 'myticker',
- 'subordinationtype', 'holdco', 'preferredremovaldate']
- names_refobligation = ['obligationname', 'prospectusinfo', 'refentities',
- 'type', 'isconvert', 'isperp', 'coupontype', 'ccy',
- 'maturity', 'issuedate', 'coupon', 'isin', 'cusip', 'event']
+ tree = etree.parse(fname, parser=parser)
+ conn = dbconn("serenitasdb")
+ names_redpair = [
+ "redpaircode",
+ "role",
+ "referenceentity",
+ "redentitycode",
+ "tier",
+ "pairiscurrent",
+ "pairvalidfrom",
+ "pairvalidto",
+ "ticker",
+ "ispreferred",
+ "preferreddate",
+ "indexconstituents",
+ "recorddate",
+ "publiccomments",
+ "myticker",
+ "subordinationtype",
+ "holdco",
+ "preferredremovaldate",
+ ]
+ names_refobligation = [
+ "obligationname",
+ "prospectusinfo",
+ "refentities",
+ "type",
+ "isconvert",
+ "isperp",
+ "coupontype",
+ "ccy",
+ "maturity",
+ "issuedate",
+ "coupon",
+ "isin",
+ "cusip",
+ "event",
+ ]
- redpair_insert = ("""INSERT INTO RedPairMapping VALUES({})
- ON CONFLICT(redpaircode) DO UPDATE SET {}""".
- format(",".join(["%s"] * len(names_redpair)),
- ",".join(f"{name}=EXCLUDED.{name}" for name in names_redpair[1:])))
- refobligation_insert = ("""INSERT INTO RefObligation({}) VALUES({})
- ON CONFLICT(obligationname) DO UPDATE SET {}""".
- format(",".join(names_refobligation),
- ",".join(["%s"] * len(names_refobligation)),
- ",".join(f"{name}=EXCLUDED.{name}" for name in names_refobligation[1:])))
+ redpair_insert = """INSERT INTO RedPairMapping VALUES({})
+ ON CONFLICT(redpaircode) DO UPDATE SET {}""".format(
+ ",".join(["%s"] * len(names_redpair)),
+ ",".join(f"{name}=EXCLUDED.{name}" for name in names_redpair[1:]),
+ )
+ refobligation_insert = """INSERT INTO RefObligation({}) VALUES({})
+ ON CONFLICT(obligationname) DO UPDATE SET {}""".format(
+ ",".join(names_refobligation),
+ ",".join(["%s"] * len(names_refobligation)),
+ ",".join(f"{name}=EXCLUDED.{name}" for name in names_refobligation[1:]),
+ )
skipfirst = True
+
def simple_parse(e):
- if e.tag == 'indexconstituents':
+ if e.tag == "indexconstituents":
return [ic.text for ic in e]
- elif e.tag == 'holdco':
+ elif e.tag == "holdco":
return e.text == "TRUE"
else:
return e.text
@@ -104,26 +167,35 @@ def insert_refobligation(fname):
continue
d = {col.tag: col if len(col) else col.text for col in child}
with conn.cursor() as c2:
- for el in d['refentities']:
+ for el in d["refentities"]:
redpair = {e.tag: simple_parse(e) for e in el}
- c2.execute(redpair_insert, [redpair.get(name) for name in names_redpair])
+ c2.execute(
+ redpair_insert, [redpair.get(name) for name in names_redpair]
+ )
conn.commit()
- d['refentities'] = [el.text for el in d['refentities'].iterfind('.//redpaircode')]
- if 'prospectusinfo' in d:
- d['prospectusinfo'] = parse_prospectus(d['prospectusinfo'])
- c.execute(refobligation_insert, [d.get(name) for name in names_refobligation])
+ d["refentities"] = [
+ el.text for el in d["refentities"].iterfind(".//redpaircode")
+ ]
+ if "prospectusinfo" in d:
+ d["prospectusinfo"] = parse_prospectus(d["prospectusinfo"])
+ c.execute(
+ refobligation_insert, [d.get(name) for name in names_refobligation]
+ )
conn.commit()
+
def get_date(f):
m = re.search("(\d*)\.", f.name)
if m:
timestamp = int(m.groups(0)[0])
- return datetime.datetime.fromtimestamp(timestamp/1000)
+ return datetime.datetime.fromtimestamp(timestamp / 1000)
+
if __name__ == "__main__":
from markit_red import download_report
- base_dir = Path('/home/serenitas/CorpCDOs/Tranche_data/RED_reports/Deltas')
- for report in ['REDEntityDelta', 'REDObligationDelta']:
+
+ base_dir = Path("/home/serenitas/CorpCDOs/Tranche_data/RED_reports/Deltas")
+ for report in ["REDEntityDelta", "REDObligationDelta"]:
g = download_report(report)
f = base_dir / next(g)[0]
if "Entity" in report:
diff --git a/python/markit_red.py b/python/markit_red.py
index 35a10f05..d2bd98bb 100644
--- a/python/markit_red.py
+++ b/python/markit_red.py
@@ -9,11 +9,12 @@ from lxml import etree
from utils.db import serenitas_engine
import pandas as pd
+
def request_payload(payload):
- r = requests.post('https://products.markit.com/red/export.jsp', params=payload)
+ r = requests.post("https://products.markit.com/red/export.jsp", params=payload)
res = []
- path = os.path.join(os.environ['BASE_DIR'], "Tranche_data", "RED_reports")
- if 'Delta' in payload['report']:
+ path = os.path.join(os.environ["BASE_DIR"], "Tranche_data", "RED_reports")
+ if "Delta" in payload["report"]:
path = os.path.join(path, "Deltas")
try:
@@ -26,74 +27,110 @@ def request_payload(payload):
print(r.content)
return res
+
def download_report(report):
- version_mapping = {"REDEntity": 11,
- "REDEntityDelta": 11,
- "REDEntityMapped": 11,
- "REDObligation": 10,
- "REDObligationDelta": 10,
- "REDObligationMapped": 10,
- "REDSROObligation": 10,
- "REDobligationpreferred": 10,
- "CredIndexAnnex": 10,
- "CredIndexAnnexSplit": 10,
- "REDIndexCodes": 9,
- "redindexclassification": 9,
- "redindexclassificationdelta": 9}
+ version_mapping = {
+ "REDEntity": 11,
+ "REDEntityDelta": 11,
+ "REDEntityMapped": 11,
+ "REDObligation": 10,
+ "REDObligationDelta": 10,
+ "REDObligationMapped": 10,
+ "REDSROObligation": 10,
+ "REDobligationpreferred": 10,
+ "CredIndexAnnex": 10,
+ "CredIndexAnnexSplit": 10,
+ "REDIndexCodes": 9,
+ "redindexclassification": 9,
+ "redindexclassificationdelta": 9,
+ }
- payload = {'user': 'GuillaumeHorel',
- 'password': 'password',
- 'version': version_mapping[report],
- 'report': report}
+ payload = {
+ "user": "GuillaumeHorel",
+ "password": "password",
+ "version": version_mapping[report],
+ "report": report,
+ }
- if report in ['CredIndexAnnex', 'CredIndexAnnexSplit']:
- for family in ['CDX', 'ITRAXX-EUROPE']:
- payload.update({'family': family})
+ if report in ["CredIndexAnnex", "CredIndexAnnexSplit"]:
+ for family in ["CDX", "ITRAXX-EUROPE"]:
+ payload.update({"family": family})
yield request_payload(payload)
else:
yield request_payload(payload)
+
def update_redcodes(fname):
- with open(os.path.join(os.environ['BASE_DIR'], "Tranche_data", "RED_reports", fname)) as fh:
+ with open(
+ os.path.join(os.environ["BASE_DIR"], "Tranche_data", "RED_reports", fname)
+ ) as fh:
et = etree.parse(fh)
data_version = []
data_maturity = []
- for index in et.iter('index'):
- temp = {c.tag: c.text for c in index if c.tag not in ['terms', 'paymentfrequency']}
+ for index in et.iter("index"):
+ temp = {
+ c.tag: c.text for c in index if c.tag not in ["terms", "paymentfrequency"]
+ }
data_version.append(temp)
- for term in index.iter('term'):
- d = {'redindexcode': temp['redindexcode']}
+ for term in index.iter("term"):
+ d = {"redindexcode": temp["redindexcode"]}
d.update({c.tag: c.text for c in term})
data_maturity.append(d)
df_maturity = pd.DataFrame(data_maturity)
df_version = pd.DataFrame(data_version)
- df_version['activeversion'] = df_version['activeversion'].map({'Y': True, None: False})
- df_maturity.tenor = df_maturity['tenor'].map(lambda s: s.lower() + 'r')
+ df_version["activeversion"] = df_version["activeversion"].map(
+ {"Y": True, None: False}
+ )
+ df_maturity.tenor = df_maturity["tenor"].map(lambda s: s.lower() + "r")
df_maturity.coupon = (pd.to_numeric(df_maturity.coupon) * 10000).astype(int)
- df_version.to_sql("index_version_markit", serenitas_engine, index=False, if_exists='append')
- df_maturity.to_sql("index_maturity_markit", serenitas_engine, index=False, if_exists='append')
+ df_version.to_sql(
+ "index_version_markit", serenitas_engine, index=False, if_exists="append"
+ )
+ df_maturity.to_sql(
+ "index_maturity_markit", serenitas_engine, index=False, if_exists="append"
+ )
+
def update_redindices(fname):
- basedir = Path(os.environ['BASE_DIR']) / "Tranche_data"
+ basedir = Path(os.environ["BASE_DIR"]) / "Tranche_data"
with open(basedir / "RED_reports" / fname) as fh:
e = etree.parse(fh)
root = e.getroot()
- headers = ['referenceentity', 'redentitycode', 'role', 'redpaircode', 'jurisdiction',
- 'tier', 'pairiscurrent', 'pairvalidto', 'pairvalidfrom', 'ticker',
- 'ispreferred', 'isdatransactiontype', 'docclause', 'recorddate',
- 'publiccomments', 'weight', 'holdco', 'subordinationtype']
- for c in root.findall('index'):
- names = [c.find(tag).text for tag in ['indexsubfamily', 'series', 'version']]
- with open( basedir / "RED_indices" / "{0}.{1}.V{2}.csv".format(*names), "w") as fh2:
+ headers = [
+ "referenceentity",
+ "redentitycode",
+ "role",
+ "redpaircode",
+ "jurisdiction",
+ "tier",
+ "pairiscurrent",
+ "pairvalidto",
+ "pairvalidfrom",
+ "ticker",
+ "ispreferred",
+ "isdatransactiontype",
+ "docclause",
+ "recorddate",
+ "publiccomments",
+ "weight",
+ "holdco",
+ "subordinationtype",
+ ]
+ for c in root.findall("index"):
+ names = [c.find(tag).text for tag in ["indexsubfamily", "series", "version"]]
+ with open(
+ basedir / "RED_indices" / "{0}.{1}.V{2}.csv".format(*names), "w"
+ ) as fh2:
csvwriter = csv.DictWriter(fh2, fieldnames=headers)
csvwriter.writeheader()
data = []
- for constituent in c.findall('.//originalconstituent'):
+ for constituent in c.findall(".//originalconstituent"):
data.append({l.tag: l.text for l in constituent})
- data = sorted(data, key=lambda x: x['referenceentity'])
+ data = sorted(data, key=lambda x: x["referenceentity"])
csvwriter.writerows(data)
+
if __name__ == "__main__":
fname = next(download_report("REDIndexCodes"))
update_redcodes(fname[0])
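
A trimmed, self-contained sketch of the <index>/<term> flattening that update_redcodes performs; the XML snippet is invented and far smaller than a real RED report:

    from lxml import etree

    xml = b"""<indices>
      <index>
        <redindexcode>ABC123</redindexcode>
        <name>CDX.NA.IG</name>
        <terms><term><tenor>5Y</tenor><coupon>0.01</coupon></term></terms>
      </index>
    </indices>"""
    root = etree.fromstring(xml)
    data_version, data_maturity = [], []
    for index in root.iter("index"):
        temp = {c.tag: c.text for c in index if c.tag not in ["terms", "paymentfrequency"]}
        data_version.append(temp)
        for term in index.iter("term"):
            d = {"redindexcode": temp["redindexcode"]}
            d.update({c.tag: c.text for c in term})
            data_maturity.append(d)
    # data_version  -> [{'redindexcode': 'ABC123', 'name': 'CDX.NA.IG'}]
    # data_maturity -> [{'redindexcode': 'ABC123', 'tenor': '5Y', 'coupon': '0.01'}]
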
diff --git a/python/parse_citi_pdf.py b/python/parse_citi_pdf.py
index 71bdc4c8..39a33f5a 100644
--- a/python/parse_citi_pdf.py
+++ b/python/parse_citi_pdf.py
@@ -3,19 +3,28 @@ import subprocess
from bs4 import BeautifulSoup
from env import DAILY_DIR
+
def load_pdf(file_path):
- proc = subprocess.run(["pdftohtml", "-xml", "-stdout", "-i",
- file_path.as_posix()],
- capture_output=True)
+ proc = subprocess.run(
+ ["pdftohtml", "-xml", "-stdout", "-i", file_path.as_posix()],
+ capture_output=True,
+ )
soup = BeautifulSoup(proc.stdout, features="lxml")
l = soup.findAll("text")
l = sorted(l, key=lambda x: (int(x["top"]), int(x["left"])))
return l
+
def get_col(l, top, bottom, left, right):
- return [c.text for c in l if int(c["left"]) >= left and \
- int(c["left"]) < right and \
- int(c["top"]) >= top and int(c["top"]) < bottom ]
+ return [
+ c.text
+ for c in l
+ if int(c["left"]) >= left
+ and int(c["left"]) < right
+ and int(c["top"]) >= top
+ and int(c["top"]) < bottom
+ ]
+
def parse_num(s):
s = s.replace(",", "")
@@ -24,18 +33,24 @@ def parse_num(s):
else:
return float(s)
+
def get_df(l, col1, col2, col3):
- df = pd.DataFrame({"amount": get_col(l, *col2),
- "currency": get_col(l, *col3)},
- index=get_col(l, *col1))
+ df = pd.DataFrame(
+ {"amount": get_col(l, *col2), "currency": get_col(l, *col3)},
+ index=get_col(l, *col1),
+ )
df.amount = df.amount.apply(parse_num)
df.index = df.index.str.lstrip()
return df
+
def get_citi_collateral(d):
try:
- fname = next((DAILY_DIR / "CITI_reports").
- glob(f"262966_MarginNotice_{d.strftime('%Y%m%d')}_*.pdf"))
+ fname = next(
+ (DAILY_DIR / "CITI_reports").glob(
+ f"262966_MarginNotice_{d.strftime('%Y%m%d')}_*.pdf"
+ )
+ )
except StopIteration:
raise FileNotFoundError(f"CITI file not found for date {d.date()}")
l = load_pdf(fname)
@@ -51,5 +66,7 @@ def get_citi_collateral(d):
col2 = (top, bottom, 100, 500)
col3 = (top, bottom, 500, 600)
initial_margin = get_df(l, col1, col2, col3)
- return variation_margin.loc["VM Total Collateral", "amount"] + \
- initial_margin.loc["Non Reg IM Total Collateral", "amount"]
+ return (
+ variation_margin.loc["VM Total Collateral", "amount"]
+ + initial_margin.loc["Non Reg IM Total Collateral", "amount"]
+ )
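
get_df/get_col above slice the pdftohtml text boxes by pixel bounding boxes. A hedged usage sketch: the top/bottom values and the label-column bounds below are hypothetical, while the amount and currency cuts mirror the ones used for the initial-margin table above.

    # l = load_pdf(fname)           # sorted <text> nodes from pdftohtml -xml
    top, bottom = 645, 723          # hypothetical vertical extent of the table
    col1 = (top, bottom, 0, 100)    # row labels (bounds assumed)
    col2 = (top, bottom, 100, 500)  # amounts
    col3 = (top, bottom, 500, 600)  # currencies
    # df = get_df(l, col1, col2, col3)
    # df.loc["VM Total Collateral", "amount"]
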
diff --git a/python/parse_gs.py b/python/parse_gs.py
index 14137a06..ffbd038f 100644
--- a/python/parse_gs.py
+++ b/python/parse_gs.py
@@ -13,15 +13,20 @@ for index in ["IG", "HY"]:
flag = False
masterdf = {}
for line in fh:
- line = line.decode('utf-8', 'ignore')
+ line = line.decode("utf-8", "ignore")
line = line.rstrip()
- m = re.search("(IG|HY)(\d{2}) 5y (?:.*)SWAPTION (?:UPDATE|CLOSES|CLOSE) - Ref\D+(.+)$", line)
+ m = re.search(
+ "(IG|HY)(\d{2}) 5y (?:.*)SWAPTION (?:UPDATE|CLOSES|CLOSE) - Ref\D+(.+)$",
+ line,
+ )
if m:
indextype = m.groups()[0]
series = int(m.groups()[1])
- if indextype == 'HY':
- refprice, refspread = map(float,
- re.match("([\S]+)\s+\(([^)]+)\)", m.groups()[2]).groups())
+ if indextype == "HY":
+ refprice, refspread = map(
+ float,
+ re.match("([\S]+)\s+\(([^)]+)\)", m.groups()[2]).groups(),
+ )
else:
refspread = float(m.groups()[2])
continue
@@ -29,10 +34,12 @@ for index in ["IG", "HY"]:
quotedate = pd.to_datetime(line[4:])
continue
if line.startswith("Expiry"):
- m = re.match("Expiry (\d{2}\w{3}\d{2}) \((?:([\S]+) )?([\S]+)\)", line)
+ m = re.match(
+ "Expiry (\d{2}\w{3}\d{2}) \((?:([\S]+) )?([\S]+)\)", line
+ )
if m:
date, fwprice, fwspread = m.groups()
- date = pd.to_datetime(date, format='%d%b%y')
+ date = pd.to_datetime(date, format="%d%b%y")
continue
if line.startswith("Stk"):
flag = True
@@ -41,7 +48,7 @@ for index in ["IG", "HY"]:
if flag:
if line:
vals = re.sub(" +", " ", line).split(" ")
- if indextype=='HY':
+ if indextype == "HY":
vals.pop(2)
vals.pop(9)
else:
@@ -50,62 +57,85 @@ for index in ["IG", "HY"]:
r.append(vals)
continue
else:
- if indextype=='HY':
- cols = ['Strike', 'Sprd', 'Pay', 'DeltaPay', 'Rec', 'Vol',
- 'VolChg', 'VolBpd', 'Tail']
+ if indextype == "HY":
+ cols = [
+ "Strike",
+ "Sprd",
+ "Pay",
+ "DeltaPay",
+ "Rec",
+ "Vol",
+ "VolChg",
+ "VolBpd",
+ "Tail",
+ ]
else:
- cols = ['Strike', 'Pay', 'DeltaPay', 'Rec', 'Vol',
- 'VolChg', 'VolBpd', 'Tail']
- df = pd.DataFrame.from_records(r, columns = cols)
+ cols = [
+ "Strike",
+ "Pay",
+ "DeltaPay",
+ "Rec",
+ "Vol",
+ "VolChg",
+ "VolBpd",
+ "Tail",
+ ]
+ df = pd.DataFrame.from_records(r, columns=cols)
- df[['PayBid', 'PayOffer']] = df.Pay.str.split('/', expand=True)
- df[['RecBid', 'RecOffer']] = df.Rec.str.split('/', expand=True)
- df.drop(['Pay', 'Rec'], axis=1, inplace=True)
+ df[["PayBid", "PayOffer"]] = df.Pay.str.split("/", expand=True)
+ df[["RecBid", "RecOffer"]] = df.Rec.str.split("/", expand=True)
+ df.drop(["Pay", "Rec"], axis=1, inplace=True)
for col in df:
- df[col] = pd.to_numeric(df[col], errors = 'coerce')
- df.set_index('Strike', inplace=True)
- d = {'quotedate': quotedate,
- 'expiry': date,
- 'index': indextype,
- 'series': series,
- 'ref': refspread if indextype =="IG" else refprice}
+ df[col] = pd.to_numeric(df[col], errors="coerce")
+ df.set_index("Strike", inplace=True)
+ d = {
+ "quotedate": quotedate,
+ "expiry": date,
+ "index": indextype,
+ "series": series,
+ "ref": refspread if indextype == "IG" else refprice,
+ }
if indextype == "IG":
- d['fwdspread'] = float(fwspread)
+ d["fwdspread"] = float(fwspread)
else:
- d['fwdprice'] = float(fwprice)
+ d["fwdprice"] = float(fwprice)
fwd_index.append(d)
masterdf[date] = df
flag = False
r = []
continue
- all_df[(quotedate, indextype, series)] = pd.concat(masterdf, names=['expiry'])
-all_df = pd.concat(all_df, names = ['quotedate', 'index', 'series'])
-all_df['DeltaPay'] = - all_df['DeltaPay']/100
-all_df['Vol'] /= 100
+ all_df[(quotedate, indextype, series)] = pd.concat(masterdf, names=["expiry"])
+all_df = pd.concat(all_df, names=["quotedate", "index", "series"])
+all_df["DeltaPay"] = -all_df["DeltaPay"] / 100
+all_df["Vol"] /= 100
index_df = pd.DataFrame(fwd_index)
all_df.reset_index(inplace=True)
-all_df = all_df.rename(columns={'Strike':'strike',
- 'Vol': 'vol',
- 'PayOffer': 'pay_offer',
- 'PayBid': 'pay_bid',
- 'RecOffer': 'rec_offer',
- 'RecBid': 'rec_bid',
- 'Tail': 'tail',
- 'DeltaPay': 'delta_pay'})
-del all_df['VolBpd'], all_df['VolChg']
-if 'Sprd' in all_df:
- del all_df['Sprd']
-all_df['quote_source'] = 'GS'
-index_df = index_df.drop_duplicates(['quotedate', 'index', 'series', 'expiry'])
+all_df = all_df.rename(
+ columns={
+ "Strike": "strike",
+ "Vol": "vol",
+ "PayOffer": "pay_offer",
+ "PayBid": "pay_bid",
+ "RecOffer": "rec_offer",
+ "RecBid": "rec_bid",
+ "Tail": "tail",
+ "DeltaPay": "delta_pay",
+ }
+)
+del all_df["VolBpd"], all_df["VolChg"]
+if "Sprd" in all_df:
+ del all_df["Sprd"]
+all_df["quote_source"] = "GS"
+index_df = index_df.drop_duplicates(["quotedate", "index", "series", "expiry"])
##insert quotes
from utils.db import dbengine
-serenitasdb = dbengine('serenitasdb')
+
+serenitasdb = dbengine("serenitasdb")
conn = serenitasdb.raw_connection()
-format_str = "INSERT INTO swaption_ref_quotes({}) VALUES({}) " \
- "ON CONFLICT DO NOTHING"
+format_str = "INSERT INTO swaption_ref_quotes({}) VALUES({}) " "ON CONFLICT DO NOTHING"
cols = index_df.columns
sqlstr = format_str.format(",".join(cols), ",".join(["%s"] * len(cols)))
with conn.cursor() as c:
diff --git a/python/position.py b/python/position.py
index 589b22c6..91286770 100644
--- a/python/position.py
+++ b/python/position.py
@@ -10,160 +10,208 @@ import os
import logging
import sys
-def get_list(engine, workdate: datetime.datetime=None, asset_class=None,
- include_unsettled: bool=True,
- fund="SERCGMAST"):
+
+def get_list(
+ engine,
+ workdate: datetime.datetime = None,
+ asset_class=None,
+ include_unsettled: bool = True,
+ fund="SERCGMAST",
+):
if workdate:
- positions = pd.read_sql_query("SELECT identifier, bbg_type FROM "
- "list_positions(%s, %s, %s, %s)",
- engine,
- params=(workdate.date(),
- asset_class,
- include_unsettled,
- fund))
- positions.loc[positions.identifier.str.len() <= 11, 'cusip'] = \
- positions.identifier.str.slice(stop=9)
- positions.loc[positions.identifier.str.len() == 12, 'isin'] = \
- positions.identifier
+ positions = pd.read_sql_query(
+ "SELECT identifier, bbg_type FROM " "list_positions(%s, %s, %s, %s)",
+ engine,
+ params=(workdate.date(), asset_class, include_unsettled, fund),
+ )
+ positions.loc[
+ positions.identifier.str.len() <= 11, "cusip"
+ ] = positions.identifier.str.slice(stop=9)
+ positions.loc[
+ positions.identifier.str.len() == 12, "isin"
+ ] = positions.identifier
else:
positions = pd.read_sql_table("securities", engine)
- positions['bbg_id'] = positions.cusip.where(positions.cusip.notnull(), positions['isin']) + \
- ' ' + positions.bbg_type
- positions.set_index('bbg_id', inplace=True)
+ positions["bbg_id"] = (
+ positions.cusip.where(positions.cusip.notnull(), positions["isin"])
+ + " "
+ + positions.bbg_type
+ )
+ positions.set_index("bbg_id", inplace=True)
return positions
+
def get_list_range(engine, begin, end, asset_class=None):
begin = pd.Timestamp(begin).date()
end = pd.Timestamp(end).date()
- positions = pd.read_sql_query("select identifier, bbg_type, strategy from list_positions_range(%s, %s, %s)",
- engine,
- params=(begin, end, asset_class))
- positions.loc[positions.identifier.str.len() <= 11, 'cusip'] = positions.identifier.str.slice(stop=9)
- positions.loc[positions.identifier.str.len() == 12, 'isin'] = positions.identifier
- positions['bbg_id'] = positions.cusip.where(positions.cusip.notnull(), positions['isin']) + \
- ' ' + positions.bbg_type
- positions.set_index('bbg_id', inplace=True)
+ positions = pd.read_sql_query(
+ "select identifier, bbg_type, strategy from list_positions_range(%s, %s, %s)",
+ engine,
+ params=(begin, end, asset_class),
+ )
+ positions.loc[
+ positions.identifier.str.len() <= 11, "cusip"
+ ] = positions.identifier.str.slice(stop=9)
+ positions.loc[positions.identifier.str.len() == 12, "isin"] = positions.identifier
+ positions["bbg_id"] = (
+ positions.cusip.where(positions.cusip.notnull(), positions["isin"])
+ + " "
+ + positions.bbg_type
+ )
+ positions.set_index("bbg_id", inplace=True)
return positions
-def backpopulate_marks(begin_str='2015-01-15', end_str='2015-07-15'):
+
+def backpopulate_marks(begin_str="2015-01-15", end_str="2015-07-15"):
pattern = re.compile("\d{4}-\d{2}-\d{2}")
- list_of_daily_folder = (fullpath for (fullpath, _, _) in os.walk('/home/serenitas/Daily')
- if pattern.match(os.path.basename(fullpath)))
+ list_of_daily_folder = (
+ fullpath
+ for (fullpath, _, _) in os.walk("/home/serenitas/Daily")
+ if pattern.match(os.path.basename(fullpath))
+ )
list_of_bdays = bdate_range(start=begin_str, end=end_str)
for path in list_of_daily_folder:
date = pd.to_datetime(os.path.basename(path))
if date in list_of_bdays:
marks_file = [f for f in os.listdir(path) if f.startswith("securitiesNpv")]
if marks_file:
- marks_file.sort(key=lambda x:x[13:], reverse=True) #sort by lexicographic order which is what we want since we use ISO dates
+ marks_file.sort(
+ key=lambda x: x[13:], reverse=True
+ ) # sort by lexicographic order which is what we want since we use ISO dates
marks = pd.read_csv(os.path.join(path, marks_file[0]))
positions = get_list(pd.to_datetime(date))
- positions = positions.merge(marks, left_on='identifier', right_on='IDENTIFIER')
- positions.drop(['IDENTIFIER', 'last_settle_date'], axis=1, inplace=True)
- positions['date'] = date
- positions.rename(columns={'Price': 'price'}, inplace=True)
+ positions = positions.merge(
+ marks, left_on="identifier", right_on="IDENTIFIER"
+ )
+ positions.drop(["IDENTIFIER", "last_settle_date"], axis=1, inplace=True)
+ positions["date"] = date
+ positions.rename(columns={"Price": "price"}, inplace=True)
positions = positions.drop_duplicates()
- positions.to_sql('position', engine, if_exists='append', index=False)
+ positions.to_sql("position", engine, if_exists="append", index=False)
+
def update_securities(engine, session, workdate):
- field = {'Corp': 'PREV_CPN_DT', 'Mtge': 'START_ACC_DT'}
+ field = {"Corp": "PREV_CPN_DT", "Mtge": "START_ACC_DT"}
securities = get_list(engine)
securities = securities[securities.paid_down.isnull()]
- data = retrieve_data(session, securities.index.tolist(),
- ['PREV_CPN_DT', 'START_ACC_DT', 'CUR_CPN', 'CPN_ASOF_DT'])
- data = pd.DataFrame.from_dict(data, orient='index')
- data = data[data.CPN_ASOF_DT.isnull() |(data.CPN_ASOF_DT <= workdate)]
+ data = retrieve_data(
+ session,
+ securities.index.tolist(),
+ ["PREV_CPN_DT", "START_ACC_DT", "CUR_CPN", "CPN_ASOF_DT"],
+ )
+ data = pd.DataFrame.from_dict(data, orient="index")
+ data = data[data.CPN_ASOF_DT.isnull() | (data.CPN_ASOF_DT <= workdate)]
m = securities.merge(data, left_index=True, right_index=True)
conn = engine.raw_connection()
with conn.cursor() as c:
- for r in m.to_dict('records'):
- accrued_field = field[r['bbg_type']]
+ for r in m.to_dict("records"):
+ accrued_field = field[r["bbg_type"]]
if r[accrued_field] < workdate:
- c.execute(f"UPDATE securities SET start_accrued_date=%({accrued_field})s "
- ",coupon=%(CUR_CPN)s WHERE identifier=%(identifier)s",
- r)
+ c.execute(
+ f"UPDATE securities SET start_accrued_date=%({accrued_field})s "
+ ",coupon=%(CUR_CPN)s WHERE identifier=%(identifier)s",
+ r,
+ )
conn.commit()
conn.close()
+
def init_fx(session, engine, startdate):
- currencies = ['EURUSD', 'CADUSD']
- securities = [c + ' Curncy' for c in currencies]
- data = retrieve_data(session, securities, ['PX_LAST'], start_date=startdate)
- data = data['EURUSD Curncy'].merge(data['CADUSD Curncy'], left_on='date', right_on='date')
- data.rename(columns={'PX_LAST_x': 'eurusd',
- 'PX_LAST_y': 'cadusd'}, inplace=True)
- data.to_sql('fx', engine, if_exists='append')
+ currencies = ["EURUSD", "CADUSD"]
+ securities = [c + " Curncy" for c in currencies]
+ data = retrieve_data(session, securities, ["PX_LAST"], start_date=startdate)
+ data = data["EURUSD Curncy"].merge(
+ data["CADUSD Curncy"], left_on="date", right_on="date"
+ )
+ data.rename(columns={"PX_LAST_x": "eurusd", "PX_LAST_y": "cadusd"}, inplace=True)
+ data.to_sql("fx", engine, if_exists="append")
+
def update_fx(conn, session, currencies):
- securities = [c + ' Curncy' for c in currencies]
- data = retrieve_data(session, securities, ['FIXED_CLOSING_PRICE_NY', 'PX_CLOSE_DT'])
- colnames = ['date']
+ securities = [c + " Curncy" for c in currencies]
+ data = retrieve_data(session, securities, ["FIXED_CLOSING_PRICE_NY", "PX_CLOSE_DT"])
+ colnames = ["date"]
values = []
for k, v in data.items():
- currency_pair = k.split(' ')[0].lower()
+ currency_pair = k.split(" ")[0].lower()
colnames.append(currency_pair)
- values.append(v['FIXED_CLOSING_PRICE_NY'])
- values = [v['PX_CLOSE_DT']] + values
- sqlstr = 'INSERT INTO fx({0}) VALUES({1}) ON CONFLICT DO NOTHING'.format(
- ",".join(colnames),
- ",".join(["%s"]*len(values)))
+ values.append(v["FIXED_CLOSING_PRICE_NY"])
+ values = [v["PX_CLOSE_DT"]] + values
+ sqlstr = "INSERT INTO fx({0}) VALUES({1}) ON CONFLICT DO NOTHING".format(
+ ",".join(colnames), ",".join(["%s"] * len(values))
+ )
with conn.cursor() as c:
c.execute(sqlstr, values)
conn.commit()
-def init_swap_rates(conn, session, tenors=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 30],
- start_date=datetime.date(1998, 10, 7)):
+
+def init_swap_rates(
+ conn,
+ session,
+ tenors=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 30],
+ start_date=datetime.date(1998, 10, 7),
+):
securities = [f"USISDA{t:02} Index" for t in tenors]
- data = retrieve_data(session, securities, ['PX_LAST'],
- start_date=datetime.date(1998, 10, 7))
+ data = retrieve_data(
+ session, securities, ["PX_LAST"], start_date=datetime.date(1998, 10, 7)
+ )
for t in tenors:
ticker = f"USISDA{t:02} Index"
- sql_str = f'INSERT INTO USD_swap_fixings(fixing_date, "{t}y") ' + \
- 'VALUES(%s, %s) ON CONFLICT (fixing_date)' + \
- f' DO UPDATE SET "{t}y" = excluded."{t}y"'
+ sql_str = (
+ f'INSERT INTO USD_swap_fixings(fixing_date, "{t}y") '
+ + "VALUES(%s, %s) ON CONFLICT (fixing_date)"
+ + f' DO UPDATE SET "{t}y" = excluded."{t}y"'
+ )
with conn.cursor() as c:
- c.executemany(sql_str,
- [(d, r) for d, r in data[ticker]['PX_LAST'].items()])
+ c.executemany(sql_str, [(d, r) for d, r in data[ticker]["PX_LAST"].items()])
conn.commit()
-def init_swaption_vol(session,
- tenors=['A', 'C', 'F', 'I'] + list(range(1, 11)) + [15, 20, 25, 30],
- source='BBIR',
- vol_type='N',
- start_date=datetime.date(1990, 1, 1)):
+
+def init_swaption_vol(
+ session,
+ tenors=["A", "C", "F", "I"] + list(range(1, 11)) + [15, 20, 25, 30],
+ source="BBIR",
+ vol_type="N",
+ start_date=datetime.date(1990, 1, 1),
+):
tickers = []
for t1 in tenors:
for t2 in tenors[4:]:
tickers.append(f"USS{vol_type}{t1:0>2}{t2} {source} Curncy")
- data = retrieve_data(session, tickers, ['PX_LAST'],
- start_date=start_date)
+ data = retrieve_data(session, tickers, ["PX_LAST"], start_date=start_date)
return data
-def split_tenor_expiry(ticker, vol_type='N'):
+
+def split_tenor_expiry(ticker, vol_type="N"):
m = re.match(f"USS{vol_type}(.{{2}})([^\s]*) ([^\s]*) Curncy", ticker)
expiry, tenor, _ = m.groups()
- if expiry[0] == '0':
+ if expiry[0] == "0":
expiry = expiry[1:]
if not expiry.isalpha():
expiry = int(expiry)
tenor = int(tenor)
return expiry, tenor
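
The tickers built in init_swaption_vol/update_swaption_vol ("USS{vol_type}{expiry:0>2}{tenor} {source} Curncy") are inverted by split_tenor_expiry; a small round-trip check with illustrative values, copying the function so the snippet stands alone:

    import re

    def split_tenor_expiry(ticker, vol_type="N"):
        m = re.match(f"USS{vol_type}(.{{2}})([^\s]*) ([^\s]*) Curncy", ticker)
        expiry, tenor, _ = m.groups()
        if expiry[0] == "0":
            expiry = expiry[1:]
        if not expiry.isalpha():
            expiry = int(expiry)
        return expiry, int(tenor)

    assert split_tenor_expiry(f"USSN{'A':0>2}{10} BBIR Curncy") == ("A", 10)  # "USSN0A10 BBIR Curncy"
    assert split_tenor_expiry(f"USSN{5:0>2}{30} CMPN Curncy") == (5, 30)      # "USSN0530 CMPN Curncy"
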
+
def insert_swaption_vol(data, conn, source, vol_type="N"):
- tenors = ['A', 'C', 'F', 'I'] + list(range(1, 11)) + [15, 20, 25, 30]
+ tenors = ["A", "C", "F", "I"] + list(range(1, 11)) + [15, 20, 25, 30]
df = pd.concat(data, axis=1)
df.columns = df.columns.get_level_values(0)
- df.columns = pd.MultiIndex.from_tuples([split_tenor_expiry(c, vol_type) for c in df.columns])
+ df.columns = pd.MultiIndex.from_tuples(
+ [split_tenor_expiry(c, vol_type) for c in df.columns]
+ )
table_name = "swaption_normal_vol" if vol_type == "N" else "swaption_lognormal_vol"
for t in tenors[-14:]:
- sql_str = f'INSERT INTO {table_name}(date, "{t}y", source) ' + \
- 'VALUES(%s, %s, %s) ON CONFLICT (date, source)' + \
- f' DO UPDATE SET "{t}y" = excluded."{t}y", source = excluded.source'
+ sql_str = (
+ f'INSERT INTO {table_name}(date, "{t}y", source) '
+ + "VALUES(%s, %s, %s) ON CONFLICT (date, source)"
+ + f' DO UPDATE SET "{t}y" = excluded."{t}y", source = excluded.source'
+ )
with conn.cursor() as c:
df_temp = df.xs(t, axis=1, level=1).reindex(tenors, axis=1)
for k, v in df_temp.iterrows():
@@ -172,68 +220,83 @@ def insert_swaption_vol(data, conn, source, vol_type="N"):
c.execute(sql_str, (k, v.tolist(), source))
conn.commit()
-def update_swaption_vol(conn, session,
- tenors=['A', 'C', 'F', 'I'] + list(range(1, 11)) + [15, 20, 25, 30],
- *,
- sources=['BBIR', 'CMPN', 'ICPL'],
- vol_type="N"):
+
+def update_swaption_vol(
+ conn,
+ session,
+ tenors=["A", "C", "F", "I"] + list(range(1, 11)) + [15, 20, 25, 30],
+ *,
+ sources=["BBIR", "CMPN", "ICPL"],
+ vol_type="N",
+):
"""
Parameters
----------
vol_type : one of 'N' or 'V' (normal or log-normal)
"""
table_name = "swaption_normal_vol" if vol_type == "N" else "swaption_lognormal_vol"
- for source in ['BBIR', 'CMPN', 'ICPL']:
+ for source in ["BBIR", "CMPN", "ICPL"]:
tickers = []
for expiry in tenors:
for tenor in tenors:
tickers.append(f"USS{vol_type}{expiry:0>2}{tenor} {source} Curncy")
- data = retrieve_data(session, tickers, ['PX_YEST_CLOSE', 'PX_CLOSE_DT'])
+ data = retrieve_data(session, tickers, ["PX_YEST_CLOSE", "PX_CLOSE_DT"])
for t in tenors[4:]:
- sql_str = f'INSERT INTO {table_name}(date, "{t}y", source) ' + \
- 'VALUES(%s, %s, %s) ON CONFLICT (date, source)' + \
- f' DO UPDATE SET "{t}y" = excluded."{t}y", source = excluded.source'
+ sql_str = (
+ f'INSERT INTO {table_name}(date, "{t}y", source) '
+ + "VALUES(%s, %s, %s) ON CONFLICT (date, source)"
+ + f' DO UPDATE SET "{t}y" = excluded."{t}y", source = excluded.source'
+ )
r = []
dates = []
for expiry in tenors:
ticker = f"USS{vol_type}{expiry:0>2}{t} {source} Curncy"
if data[ticker]:
- r.append(data[ticker]['PX_YEST_CLOSE'])
- dates.append(data[ticker]['PX_CLOSE_DT'])
+ r.append(data[ticker]["PX_YEST_CLOSE"])
+ dates.append(data[ticker]["PX_CLOSE_DT"])
else:
r.append(None)
dates.append(dates[-1])
if dates.count(dates[0]) < len(dates):
- raise ValueError('Not all quotes are from the same date')
+ raise ValueError("Not all quotes are from the same date")
with conn.cursor() as c:
c.execute(sql_str, (dates[0], r, source))
conn.commit()
-def update_swap_rates(conn, session,
- tenors=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 30]):
+
+def update_swap_rates(
+ conn, session, tenors=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 20, 30]
+):
securities = [f"USISDA{t:02} Index" for t in tenors]
- data = retrieve_data(session, securities, ['PX_LAST', 'LAST_UPDATE_DT'])
+ data = retrieve_data(session, securities, ["PX_LAST", "LAST_UPDATE_DT"])
for t in tenors:
ticker = f"USISDA{t:02} Index"
- sql_str = f'INSERT INTO USD_swap_fixings(fixing_date, "{t}y") ' + \
- 'VALUES(%(LAST_UPDATE_DT)s, %(PX_LAST)s) ON CONFLICT (fixing_date)' + \
- f' DO UPDATE SET "{t}y" = %(PX_LAST)s'
+ sql_str = (
+ f'INSERT INTO USD_swap_fixings(fixing_date, "{t}y") '
+ + "VALUES(%(LAST_UPDATE_DT)s, %(PX_LAST)s) ON CONFLICT (fixing_date)"
+ + f' DO UPDATE SET "{t}y" = %(PX_LAST)s'
+ )
with conn.cursor() as c:
c.execute(sql_str, data[ticker])
conn.commit()
+
def update_cash_rates(conn, session, start_date=None):
- securities = {"FEDL01 Index": "FED_FUND",
- "US0001M Index": "1M_LIBOR",
- "US0003M Index": "3M_LIBOR"}
+ securities = {
+ "FEDL01 Index": "FED_FUND",
+ "US0001M Index": "1M_LIBOR",
+ "US0003M Index": "3M_LIBOR",
+ }
if start_date is None:
- data = retrieve_data(session, list(securities.keys()),
- ["PX_LAST", "LAST_UPDATE_DT"])
+ data = retrieve_data(
+ session, list(securities.keys()), ["PX_LAST", "LAST_UPDATE_DT"]
+ )
else:
- data = retrieve_data(session, list(securities.keys()),
- ["PX_LAST"], start_date=start_date.date())
+ data = retrieve_data(
+ session, list(securities.keys()), ["PX_LAST"], start_date=start_date.date()
+ )
sql_str = "INSERT INTO rates VALUES(%s, %s, %s) ON CONFLICT DO NOTHING"
with conn.cursor() as c:
if start_date is None:
@@ -245,53 +308,69 @@ def update_cash_rates(conn, session, start_date=None):
c.execute(sql_str, (d, securities[k], r))
conn.commit()
+
def populate_cashflow_history(engine, session, workdate=None, fund="SERCGMAST"):
securities = get_list(engine, workdate, fund=fund)
- data = retrieve_data(session, securities.index.tolist(),
- ['HIST_CASH_FLOW', 'MTG_HIST_CPN',
- 'FLT_CPN_HIST', 'HIST_INTEREST_DISTRIBUTED'])
- fixed_coupons = {'XS0306416982 Mtge': 7.62,
- '91927RAD1 Mtge': 6.77}
+ data = retrieve_data(
+ session,
+ securities.index.tolist(),
+ ["HIST_CASH_FLOW", "MTG_HIST_CPN", "FLT_CPN_HIST", "HIST_INTEREST_DISTRIBUTED"],
+ )
+ fixed_coupons = {"XS0306416982 Mtge": 7.62, "91927RAD1 Mtge": 6.77}
conn = engine.raw_connection()
for k, v in data.items():
- if 'HIST_CASH_FLOW' in v:
- to_insert = v['HIST_CASH_FLOW'].merge(v['MTG_HIST_CPN'], how='left',
- left_on='Payment Date',
- right_on='Payment Date')
- to_insert.rename(columns={'Coupon_y': 'coupon',
- 'Interest': 'interest',
- 'Payment Date': 'date',
- 'Principal Balance': 'principal_bal',
- 'Principal Paid': 'principal'}, inplace=True)
- to_insert.drop(['Period Number', 'Coupon_x'], axis=1, inplace=True)
- elif 'FLT_CPN_HIST' in v:
- to_insert = v['FLT_CPN_HIST']
- to_insert.rename(columns={'Coupon Rate': 'coupon',
- 'Accrual Start Date': 'date'}, inplace=True)
+ if "HIST_CASH_FLOW" in v:
+ to_insert = v["HIST_CASH_FLOW"].merge(
+ v["MTG_HIST_CPN"],
+ how="left",
+ left_on="Payment Date",
+ right_on="Payment Date",
+ )
+ to_insert.rename(
+ columns={
+ "Coupon_y": "coupon",
+ "Interest": "interest",
+ "Payment Date": "date",
+ "Principal Balance": "principal_bal",
+ "Principal Paid": "principal",
+ },
+ inplace=True,
+ )
+ to_insert.drop(["Period Number", "Coupon_x"], axis=1, inplace=True)
+ elif "FLT_CPN_HIST" in v:
+ to_insert = v["FLT_CPN_HIST"]
+ to_insert.rename(
+ columns={"Coupon Rate": "coupon", "Accrual Start Date": "date"},
+ inplace=True,
+ )
to_insert.coupon = to_insert.coupon.shift(1)
- elif 'HIST_INTEREST_DISTRIBUTED' in v:
- to_insert = v['HIST_INTEREST_DISTRIBUTED']
- to_insert.rename(columns={'Interest': 'interest',
- 'Historical Date': 'date'}, inplace=True)
+ elif "HIST_INTEREST_DISTRIBUTED" in v:
+ to_insert = v["HIST_INTEREST_DISTRIBUTED"]
+ to_insert.rename(
+ columns={"Interest": "interest", "Historical Date": "date"},
+ inplace=True,
+ )
if k in fixed_coupons:
- to_insert['coupon'] = fixed_coupons[k]
- else: #damn you XS0299146992 !
+ to_insert["coupon"] = fixed_coupons[k]
+ else: # damn you XS0299146992 !
continue
else:
logging.error("No cashflows for the given security")
- identifier = securities.loc[k, 'identifier']
- to_insert['identifier'] = identifier
+ identifier = securities.loc[k, "identifier"]
+ to_insert["identifier"] = identifier
with conn.cursor() as c:
c.execute("DELETE FROM cashflow_history WHERE identifier=%s", (identifier,))
conn.commit()
- to_insert.to_sql('cashflow_history', engine, if_exists='append', index=False)
+ to_insert.to_sql("cashflow_history", engine, if_exists="append", index=False)
with conn.cursor() as c:
c.execute("REFRESH MATERIALIZED VIEW factors_history")
conn.commit()
conn.close()
+
if __name__ == "__main__":
from utils.db import serenitas_pool, dawn_engine
+
dawn_conn = dawn_engine.raw_connection()
serenitas_conn = serenitas_pool.getconn()
if len(sys.argv) > 1:
@@ -302,7 +381,7 @@ if __name__ == "__main__":
update_securities(dawn_engine, session, workdate)
populate_cashflow_history(dawn_engine, session, workdate, "SERCGMAST")
populate_cashflow_history(dawn_engine, session, workdate, "BRINKER")
- update_fx(dawn_conn, session, ['EURUSD', 'CADUSD'])
+ update_fx(dawn_conn, session, ["EURUSD", "CADUSD"])
update_swap_rates(serenitas_conn, session)
update_cash_rates(serenitas_conn, session)
for vol_type in ["N", "V"]:
diff --git a/python/process_queue.py b/python/process_queue.py
index 8a56a751..ca50d3ce 100644
--- a/python/process_queue.py
+++ b/python/process_queue.py
@@ -22,83 +22,270 @@ from gmail_helpers import GmailMessage
from tabulate import tabulate
HEADERS_PRE = [
- 'Deal Type', 'Deal Id', 'Action', 'Client', 'Fund', 'Portfolio',
- 'Folder', 'Custodian', 'Cash Account', 'Counterparty', 'Comments',
- 'State', 'Trade Date']
-
-HEADERS = {'bond': HEADERS_PRE + [
- 'Settlement Date', 'Reserved', 'GlopeOp Security Identifier',
- 'CUSIP', 'ISIN', 'Reserved', 'Reserved',
- 'Reserved', 'Security Description', 'Transaction Indicator',
- 'SubTransaction Indicator', 'Accrued', 'Price', 'BlockId', 'BlockAmount',
- 'Reserved', 'Resesrved', 'Reserved', 'Reserved', 'ClientReference', 'ClearingMode',
- 'FaceAmount', 'Pool Factor', 'FactorAsOfDate', 'Delivery'],
- 'cds': HEADERS_PRE + [
- 'Reserved', 'Reserved', 'EffectiveDate', 'MaturityDate',
- 'Currency', 'Notional', 'FixedRate', 'PaymentRollDateConvention', 'DayCount',
- 'PaymentFrequency', 'FirstCouponRate', 'FirstCouponDate', 'ResetLag', 'Liquidation',
- 'LiquidationDate', 'Protection', 'UnderlyingSecurityId',
- 'UnderlyingSecurityDescription', 'CreditSpreadCurve',
- 'CreditEvents', 'RecoveryRate', 'Settlement', 'InitialMargin',
- 'InitialMarginPercentage','InitialMarginCurrency', 'DiscountCurve',
- 'ClientReference', 'UpfrontFee', 'UpfrontFeePayDate', 'RegenerateCashFlow',
- 'UpfrontFeeComment', 'Executing Broker','SwapType', 'OnPrice',
- 'OffPrice', 'AttachmentPoint', 'ExhaustionPoint', 'Fees', 'Fee Payment Dates',
- 'Fee Comments', 'Credit Event Occurred', 'Calendar',
- 'Clearing Facility', 'Adjusted', 'CcpTradeRef', 'BlockId',
- 'BlockAmount', 'NettingId', 'AnnouncementDate', 'ExecTS',
- 'DefaultProbability', 'ClientMargin', 'Factor', 'ISDADefinition'],
- 'swaption': HEADERS_PRE + [
- 'Reserved', 'Reserved', 'Reserved',
- 'Notional', 'PremiumSettlementDate', 'ExpirationDate',
- 'PremiumCurrency', 'PercentageOfPremium', 'ExerciseType', 'Reserved',
- 'SettlementMode', 'SettlementRate', 'Transaction Indicator',
- 'InitialMargin', 'InitialMarginPercentage', 'InitialMarginCurrency',
- 'ReceiveLegRateType', 'ReceiveFloatRate', 'ReceiveFirstCouponDate',
- 'ReceiveFirstCouponRate', 'ReceiveFixedRate', 'ReceiveDaycount',
- 'ReceiveFrequency', 'ReceivePaymentRollConvention',
- 'ReceiveEffectiveDate', 'ReceiveMaturityDate',
- 'ReceiveNotional', 'ReceiveArrears', 'ReceiveAdjusted', 'ReceiveCompound',
- 'ReceiveCurrency',
- 'PayLegRateType', 'PayFloatRate', 'PayFirstCouponDate',
- 'PayFirstCouponRate', 'PayFixedRate', 'PayDaycount', 'PayFrequency',
- 'PayPaymentRollConvention', 'PayEffectiveDate', 'PayMaturityDate',
- 'PayNotional', 'PayArrears', 'PayAdjusted', 'PayCompound', 'PayCurrency',
- 'RegenerateCashFlow', 'GiveUpBroker', 'ClientReference', 'ReceiveDiscountCurve',
- 'ReceiveForwardCurve', 'PayDiscountCurve', 'PayForwardCurve', 'ReceiveFixingFrequency',
- 'ReceiveInterestCalcMethod', 'ReceiveCompoundAverageFrequency',
- 'PayFixingFrequency', 'PayInterestCalcMethod', 'PayCompoundAverageFrequency',
- 'SwapType', 'AttachmentPoint', 'ExhaustionPoint', 'UnderlyingInstrument',
- 'AssociatedDealType', 'AssociatedDealId', 'CounterpartyReference',
- 'PremiumSettlementCurrency', 'PremiumSettlementAmount', 'ReceiveIMM Period',
- 'PayIMMPeriod', 'Reserved', 'ClearingFacility', 'Strike', 'CcpTradeRef',
- 'BreakClauseFrequency', 'BlockId', 'BlockAmount', 'Cross Currency Premium Payment',
- 'Premium Payment Amount', 'Netting Id', 'BreakClauseDate'],
- 'future': HEADERS_PRE + [
- "Settlement Date", "Reserved",
- "GlopeOp Security Identifier", "Reserved", "Reserved", "Reserved",
- "Bloomberg Ticker", "RIC", "Security Description",
- "Transaction Indicator", "SubTransaction Indicator",
- "Quantity", "Price", "Commission", "Tax", "VAT",
- "Trade Currency", "Reserved", "Reserved", "Broker Short Name",
- "MaturityDate", "Exchange", "Client Reference", "Swap Type",
- "Initial Margin", "Initial Margin Currency", "Future Event",
- "Commission Entries", "BlockId", "Block Amount"],
- 'wire': HEADERS_PRE + [
- "Settlement Date", "Reserved", "Reserved",
- "Currency", "Amount", "Associated Deal Type", "Associated Deal Id",
- "Transaction Type", "Instrument Type", "Yield", "Client Reference",
- "ClearingFacility", "Deal Function", "Reset Price", "Reset Date",
- "Ccp Trade Ref", "Margin Type", "Block Id", "Block Amount"],
- 'spot': HEADERS_PRE + ["Settlement Date", "Dealt Currency",
- "Spot Rate", "Reserved", "Buy Currency", "Buy Amount",
- "Sell Currency", "Sell Amount", "ClearingFees", "BlockId",
- "BlockAmount", "Commission Currency", "Commission", "Reserved",
- "AssociatedDealType", "AssociatedDealId", "BrokerShortName",
- "ClientReference"]
+ "Deal Type",
+ "Deal Id",
+ "Action",
+ "Client",
+ "Fund",
+ "Portfolio",
+ "Folder",
+ "Custodian",
+ "Cash Account",
+ "Counterparty",
+ "Comments",
+ "State",
+ "Trade Date",
+]
+HEADERS = {
+ "bond": HEADERS_PRE
+ + [
+ "Settlement Date",
+ "Reserved",
+ "GlopeOp Security Identifier",
+ "CUSIP",
+ "ISIN",
+ "Reserved",
+ "Reserved",
+ "Reserved",
+ "Security Description",
+ "Transaction Indicator",
+ "SubTransaction Indicator",
+ "Accrued",
+ "Price",
+ "BlockId",
+ "BlockAmount",
+ "Reserved",
+ "Resesrved",
+ "Reserved",
+ "Reserved",
+ "ClientReference",
+ "ClearingMode",
+ "FaceAmount",
+ "Pool Factor",
+ "FactorAsOfDate",
+ "Delivery",
+ ],
+ "cds": HEADERS_PRE
+ + [
+ "Reserved",
+ "Reserved",
+ "EffectiveDate",
+ "MaturityDate",
+ "Currency",
+ "Notional",
+ "FixedRate",
+ "PaymentRollDateConvention",
+ "DayCount",
+ "PaymentFrequency",
+ "FirstCouponRate",
+ "FirstCouponDate",
+ "ResetLag",
+ "Liquidation",
+ "LiquidationDate",
+ "Protection",
+ "UnderlyingSecurityId",
+ "UnderlyingSecurityDescription",
+ "CreditSpreadCurve",
+ "CreditEvents",
+ "RecoveryRate",
+ "Settlement",
+ "InitialMargin",
+ "InitialMarginPercentage",
+ "InitialMarginCurrency",
+ "DiscountCurve",
+ "ClientReference",
+ "UpfrontFee",
+ "UpfrontFeePayDate",
+ "RegenerateCashFlow",
+ "UpfrontFeeComment",
+ "Executing Broker",
+ "SwapType",
+ "OnPrice",
+ "OffPrice",
+ "AttachmentPoint",
+ "ExhaustionPoint",
+ "Fees",
+ "Fee Payment Dates",
+ "Fee Comments",
+ "Credit Event Occurred",
+ "Calendar",
+ "Clearing Facility",
+ "Adjusted",
+ "CcpTradeRef",
+ "BlockId",
+ "BlockAmount",
+ "NettingId",
+ "AnnouncementDate",
+ "ExecTS",
+ "DefaultProbability",
+ "ClientMargin",
+ "Factor",
+ "ISDADefinition",
+ ],
+ "swaption": HEADERS_PRE
+ + [
+ "Reserved",
+ "Reserved",
+ "Reserved",
+ "Notional",
+ "PremiumSettlementDate",
+ "ExpirationDate",
+ "PremiumCurrency",
+ "PercentageOfPremium",
+ "ExerciseType",
+ "Reserved",
+ "SettlementMode",
+ "SettlementRate",
+ "Transaction Indicator",
+ "InitialMargin",
+ "InitialMarginPercentage",
+ "InitialMarginCurrency",
+ "ReceiveLegRateType",
+ "ReceiveFloatRate",
+ "ReceiveFirstCouponDate",
+ "ReceiveFirstCouponRate",
+ "ReceiveFixedRate",
+ "ReceiveDaycount",
+ "ReceiveFrequency",
+ "ReceivePaymentRollConvention",
+ "ReceiveEffectiveDate",
+ "ReceiveMaturityDate",
+ "ReceiveNotional",
+ "ReceiveArrears",
+ "ReceiveAdjusted",
+ "ReceiveCompound",
+ "ReceiveCurrency",
+ "PayLegRateType",
+ "PayFloatRate",
+ "PayFirstCouponDate",
+ "PayFirstCouponRate",
+ "PayFixedRate",
+ "PayDaycount",
+ "PayFrequency",
+ "PayPaymentRollConvention",
+ "PayEffectiveDate",
+ "PayMaturityDate",
+ "PayNotional",
+ "PayArrears",
+ "PayAdjusted",
+ "PayCompound",
+ "PayCurrency",
+ "RegenerateCashFlow",
+ "GiveUpBroker",
+ "ClientReference",
+ "ReceiveDiscountCurve",
+ "ReceiveForwardCurve",
+ "PayDiscountCurve",
+ "PayForwardCurve",
+ "ReceiveFixingFrequency",
+ "ReceiveInterestCalcMethod",
+ "ReceiveCompoundAverageFrequency",
+ "PayFixingFrequency",
+ "PayInterestCalcMethod",
+ "PayCompoundAverageFrequency",
+ "SwapType",
+ "AttachmentPoint",
+ "ExhaustionPoint",
+ "UnderlyingInstrument",
+ "AssociatedDealType",
+ "AssociatedDealId",
+ "CounterpartyReference",
+ "PremiumSettlementCurrency",
+ "PremiumSettlementAmount",
+ "ReceiveIMM Period",
+ "PayIMMPeriod",
+ "Reserved",
+ "ClearingFacility",
+ "Strike",
+ "CcpTradeRef",
+ "BreakClauseFrequency",
+ "BlockId",
+ "BlockAmount",
+ "Cross Currency Premium Payment",
+ "Premium Payment Amount",
+ "Netting Id",
+ "BreakClauseDate",
+ ],
+ "future": HEADERS_PRE
+ + [
+ "Settlement Date",
+ "Reserved",
+ "GlopeOp Security Identifier",
+ "Reserved",
+ "Reserved",
+ "Reserved",
+ "Bloomberg Ticker",
+ "RIC",
+ "Security Description",
+ "Transaction Indicator",
+ "SubTransaction Indicator",
+ "Quantity",
+ "Price",
+ "Commission",
+ "Tax",
+ "VAT",
+ "Trade Currency",
+ "Reserved",
+ "Reserved",
+ "Broker Short Name",
+ "MaturityDate",
+ "Exchange",
+ "Client Reference",
+ "Swap Type",
+ "Initial Margin",
+ "Initial Margin Currency",
+ "Future Event",
+ "Commission Entries",
+ "BlockId",
+ "Block Amount",
+ ],
+ "wire": HEADERS_PRE
+ + [
+ "Settlement Date",
+ "Reserved",
+ "Reserved",
+ "Currency",
+ "Amount",
+ "Associated Deal Type",
+ "Associated Deal Id",
+ "Transaction Type",
+ "Instrument Type",
+ "Yield",
+ "Client Reference",
+ "ClearingFacility",
+ "Deal Function",
+ "Reset Price",
+ "Reset Date",
+ "Ccp Trade Ref",
+ "Margin Type",
+ "Block Id",
+ "Block Amount",
+ ],
+ "spot": HEADERS_PRE
+ + [
+ "Settlement Date",
+ "Dealt Currency",
+ "Spot Rate",
+ "Reserved",
+ "Buy Currency",
+ "Buy Amount",
+ "Sell Currency",
+ "Sell Amount",
+ "ClearingFees",
+ "BlockId",
+ "BlockAmount",
+ "Commission Currency",
+ "Commission",
+ "Reserved",
+ "AssociatedDealType",
+ "AssociatedDealId",
+ "BrokerShortName",
+ "ClientReference",
+ ],
}
+
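
Each entry of HEADERS is HEADERS_PRE plus the deal-type-specific columns, and build_line (further down) emits one CSV row per trade by looking each header up in the trade dict, defaulting to None. A minimal sketch with abbreviated, made-up headers:

# abbreviated stand-ins for HEADERS_PRE / HEADERS, illustration only
HEADERS_PRE = ["Deal Type", "Deal Id", "Action"]
HEADERS = {"bond": HEADERS_PRE + ["Price", "FaceAmount"]}

def build_row(obj, trade_type="bond"):
    # missing keys map to None, so every row has exactly len(HEADERS[trade_type]) cells
    return [obj.get(h) for h in HEADERS[trade_type]]

print(build_row({"Deal Type": "MortgageDeal", "Deal Id": 42, "Price": 99.5}))
# -> ['MortgageDeal', 42, None, 99.5, None]
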
def get_effective_date(d, swaption_type):
if swaption_type == "CD_INDEX_OPTION":
return previous_twentieth(d + datetime.timedelta(days=1))
@@ -107,22 +294,22 @@ def get_effective_date(d, swaption_type):
return pydate_from_qldate(cal.advance(Date.from_datetime(d), 2, Days))
-def get_trades(q, trade_type='bond', fund="SERCGMAST"):
+def get_trades(q, trade_type="bond", fund="SERCGMAST"):
queue_name = f"{trade_type}_{fund}"
r = q.lrange(queue_name, 0, -1)
df = [loads(e) for e in r]
list_trades = []
if df:
- for tradeid, v in groupby(df, lambda x: x['id']):
+ for tradeid, v in groupby(df, lambda x: x["id"]):
trades = list(v)
- trades = sorted(trades, key=lambda x: x['lastupdate'])
+ trades = sorted(trades, key=lambda x: x["lastupdate"])
if len(trades) == 1:
list_trades.append(trades[0])
else:
- if trades[-1]['action'] == 'CANCEL':
+ if trades[-1]["action"] == "CANCEL":
continue
- if trades[0]['action'] == 'NEW':
- trades[-1]['action'] = 'NEW'
+ if trades[0]["action"] == "NEW":
+ trades[-1]["action"] = "NEW"
list_trades.append(trades[-1])
return list_trades
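
A self-contained sketch of the deduplication above, with toy queue records (ids, prices and timestamps are made up): entries are grouped per trade id, sorted by lastupdate, dropped if the last action is CANCEL, and re-labelled NEW when the trade entered the queue as NEW so the latest amendment is booked as the new trade.

from itertools import groupby

records = [
    {"id": 1, "lastupdate": 1, "action": "NEW",    "price": 99.0},
    {"id": 1, "lastupdate": 2, "action": "UPDATE", "price": 99.5},
    {"id": 2, "lastupdate": 1, "action": "NEW",    "price": 101.0},
    {"id": 2, "lastupdate": 3, "action": "CANCEL", "price": 101.0},
]

latest = []
# groupby only merges consecutive runs, so this sketch sorts by id first
for _, grp in groupby(sorted(records, key=lambda x: x["id"]), key=lambda x: x["id"]):
    trades = sorted(grp, key=lambda x: x["lastupdate"])
    if len(trades) == 1:
        latest.append(trades[0])
    elif trades[-1]["action"] == "CANCEL":
        continue                        # cancelled before it ever left the queue
    else:
        if trades[0]["action"] == "NEW":
            trades[-1]["action"] = "NEW"  # latest terms, still booked as a new trade
        latest.append(trades[-1])

print([(t["id"], t["action"], t["price"]) for t in latest])
# -> [(1, 'NEW', 99.5)]
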
@@ -135,279 +322,384 @@ def rename_keys(d, mapping):
def build_termination(obj):
- headers = ['DealType', 'DealId', 'Action', 'Client', 'SubAction', 'PartialTermination',
- 'TerminationAmount', 'TerminationDate', 'FeesPaid', 'FeesReceived',
- 'DealFunction', 'Reserved', 'ClientReference'] + ['Reserved'] * 4 + \
- ['SpecialInstructions', 'AssignedCounterparty'] + ['Reserved'] * 7 + \
- ['GoTradeId'] + ['Reserved'] * 8 + ['InMoney', 'FeeCurrency']
- return ['SwaptionDeal', obj['dealid'], 'Update', 'Serenitas', 'Termination']
+ headers = (
+ [
+ "DealType",
+ "DealId",
+ "Action",
+ "Client",
+ "SubAction",
+ "PartialTermination",
+ "TerminationAmount",
+ "TerminationDate",
+ "FeesPaid",
+ "FeesReceived",
+ "DealFunction",
+ "Reserved",
+ "ClientReference",
+ ]
+ + ["Reserved"] * 4
+ + ["SpecialInstructions", "AssignedCounterparty"]
+ + ["Reserved"] * 7
+ + ["GoTradeId"]
+ + ["Reserved"] * 8
+ + ["InMoney", "FeeCurrency"]
+ )
+ return ["SwaptionDeal", obj["dealid"], "Update", "Serenitas", "Termination"]
def build_line(obj, trade_type="bond"):
- obj['Client'] = 'Serenitas'
- obj['State'] = 'Valid'
- rename_cols = {'fund': 'Fund',
- 'action': 'Action',
- 'dealid': 'Deal Id',
- 'folder': 'Folder',
- 'custodian': 'Custodian',
- 'cashaccount': 'Cash Account',
- 'cp_code': 'Counterparty',
- 'identifier': 'GlopeOp Security Identifier',
- 'cusip': 'CUSIP',
- 'isin': 'ISIN',
- 'description': 'Security Description',
- 'accrued': 'Accrued',
- 'price': 'Price',
- 'faceamount': 'FaceAmount',
- 'trade_date': 'Trade Date',
- 'settle_date': 'Settlement Date',
- 'effective_date': 'EffectiveDate',
- 'maturity': 'MaturityDate',
- 'currency': 'Currency',
- 'curr_notional': 'Notional',
- 'fixed_rate': 'FixedRate',
- 'payment_rolldate': 'PaymentRollDateConvention',
- 'day_count': 'DayCount',
- 'protection': 'Protection',
- 'security_id': 'UnderlyingSecurityId',
- 'security_desc': 'UnderlyingSecurityDescription',
- 'upfront': 'UpfrontFee',
- 'upfront_settle_date': 'UpfrontFeePayDate',
- 'swap_type': 'SwapType',
- 'orig_attach': 'AttachmentPoint',
- 'orig_detach': 'ExhaustionPoint',
- 'clearing_facility': 'Clearing Facility',
- 'isda_definition': 'ISDADefinition',
- 'expiration_date': 'ExpirationDate',
- 'portfolio': 'Portfolio',
- 'settlement_type': 'SettlementMode'}
+ obj["Client"] = "Serenitas"
+ obj["State"] = "Valid"
+ rename_cols = {
+ "fund": "Fund",
+ "action": "Action",
+ "dealid": "Deal Id",
+ "folder": "Folder",
+ "custodian": "Custodian",
+ "cashaccount": "Cash Account",
+ "cp_code": "Counterparty",
+ "identifier": "GlopeOp Security Identifier",
+ "cusip": "CUSIP",
+ "isin": "ISIN",
+ "description": "Security Description",
+ "accrued": "Accrued",
+ "price": "Price",
+ "faceamount": "FaceAmount",
+ "trade_date": "Trade Date",
+ "settle_date": "Settlement Date",
+ "effective_date": "EffectiveDate",
+ "maturity": "MaturityDate",
+ "currency": "Currency",
+ "curr_notional": "Notional",
+ "fixed_rate": "FixedRate",
+ "payment_rolldate": "PaymentRollDateConvention",
+ "day_count": "DayCount",
+ "protection": "Protection",
+ "security_id": "UnderlyingSecurityId",
+ "security_desc": "UnderlyingSecurityDescription",
+ "upfront": "UpfrontFee",
+ "upfront_settle_date": "UpfrontFeePayDate",
+ "swap_type": "SwapType",
+ "orig_attach": "AttachmentPoint",
+ "orig_detach": "ExhaustionPoint",
+ "clearing_facility": "Clearing Facility",
+ "isda_definition": "ISDADefinition",
+ "expiration_date": "ExpirationDate",
+ "portfolio": "Portfolio",
+ "settlement_type": "SettlementMode",
+ }
rename_keys(obj, rename_cols)
- if trade_type in ['bond', 'swaption', 'future']:
- obj['Transaction Indicator'] = "Buy" if obj['buysell'] else "Sell"
- if trade_type == 'bond':
- obj['Deal Type'] = 'MortgageDeal'
- obj['Portfolio'] = 'MORTGAGES'
- obj['Delivery'] = 'S'
+ if trade_type in ["bond", "swaption", "future"]:
+ obj["Transaction Indicator"] = "Buy" if obj["buysell"] else "Sell"
+ if trade_type == "bond":
+ obj["Deal Type"] = "MortgageDeal"
+ obj["Portfolio"] = "MORTGAGES"
+ obj["Delivery"] = "S"
# zero coupon bond
- if obj['CUSIP'] != obj['GlopeOp Security Identifier']:
- obj['CUSIP'] = None
- elif trade_type == 'swaption':
- obj['Deal Type'] = 'SwaptionDeal'
- obj['ExerciseType'] = 'European'
- rename_keys(obj, {'Settlement Date': 'PremiumSettlementDate',
- 'Price': 'PercentageOfPremium',
- 'notional': 'Notional',
- 'initial_margin_percentage': 'InitialMarginPercentage'})
+ if obj["CUSIP"] != obj["GlopeOp Security Identifier"]:
+ obj["CUSIP"] = None
+ elif trade_type == "swaption":
+ obj["Deal Type"] = "SwaptionDeal"
+ obj["ExerciseType"] = "European"
+ rename_keys(
+ obj,
+ {
+ "Settlement Date": "PremiumSettlementDate",
+ "Price": "PercentageOfPremium",
+ "notional": "Notional",
+ "initial_margin_percentage": "InitialMarginPercentage",
+ },
+ )
- obj['RegenerateCashFlow'] = 'N'
- for direction in ['Pay', 'Receive']:
- obj[direction + 'MaturityDate'] = obj['MaturityDate']
- obj[direction + 'Currency'] = obj['Currency']
- obj[direction + 'Notional'] = obj['Notional']
- obj[direction + 'EffectiveDate'] = get_effective_date(obj['Trade Date'],
- obj['SwapType'])
- if obj['SwapType'] == 'CD_INDEX_OPTION':
- for direction in ['Pay', 'Receive']:
- obj[direction + 'Daycount'] = 'ACT/360'
- obj[direction + 'Frequency'] = 'Quarterly'
- obj[direction + 'PaymentRollConvention'] = 'Following'
+ obj["RegenerateCashFlow"] = "N"
+ for direction in ["Pay", "Receive"]:
+ obj[direction + "MaturityDate"] = obj["MaturityDate"]
+ obj[direction + "Currency"] = obj["Currency"]
+ obj[direction + "Notional"] = obj["Notional"]
+ obj[direction + "EffectiveDate"] = get_effective_date(
+ obj["Trade Date"], obj["SwapType"]
+ )
+ if obj["SwapType"] == "CD_INDEX_OPTION":
+ for direction in ["Pay", "Receive"]:
+ obj[direction + "Daycount"] = "ACT/360"
+ obj[direction + "Frequency"] = "Quarterly"
+ obj[direction + "PaymentRollConvention"] = "Following"
- if obj['option_type'] == 'PAYER':
- obj['ReceiveLegRateType'] = 'Float'
- obj['ReceiveFloatRate'] = 'US0003M'
- obj['PayLegRateType'] = 'Fixed'
- obj['PayFixedRate'] = obj['FixedRate']
- elif obj['option_type'] == 'RECEIVER':
- obj['PayLegRateType'] = 'Float'
- obj['PayFloatRate'] = 'US0003M'
- obj['ReceiveLegRateType'] = 'Fixed'
- obj['ReceiveFixedRate'] = obj['FixedRate']
- elif obj['SwapType'] == 'SWAPTION':
- for direction in ['Pay', 'Receive']:
- obj[direction + 'PaymentRollConvention'] = 'ModifiedFollowing'
- if (obj['buysell'] and obj['option_type'] == 'RECEIVER') or\
- (not obj['buysell'] and obj['option_type'] == 'PAYER'):
- obj['ReceiveFrequency'] = 'Half-Yearly'
- obj['ReceiveDaycount'] = '30/360'
- obj['PayFrequency'] = 'Quarterly'
- obj['PayDaycount'] = 'ACT/360'
- obj['ReceiveFixedRate'] = obj['strike']
- obj['ReceiveLegRateType'] = 'Fixed'
- obj['PayLegRateType'] = 'Float'
- obj['PayFloatRate'] = 'US0003M'
+ if obj["option_type"] == "PAYER":
+ obj["ReceiveLegRateType"] = "Float"
+ obj["ReceiveFloatRate"] = "US0003M"
+ obj["PayLegRateType"] = "Fixed"
+ obj["PayFixedRate"] = obj["FixedRate"]
+ elif obj["option_type"] == "RECEIVER":
+ obj["PayLegRateType"] = "Float"
+ obj["PayFloatRate"] = "US0003M"
+ obj["ReceiveLegRateType"] = "Fixed"
+ obj["ReceiveFixedRate"] = obj["FixedRate"]
+ elif obj["SwapType"] == "SWAPTION":
+ for direction in ["Pay", "Receive"]:
+ obj[direction + "PaymentRollConvention"] = "ModifiedFollowing"
+ if (obj["buysell"] and obj["option_type"] == "RECEIVER") or (
+ not obj["buysell"] and obj["option_type"] == "PAYER"
+ ):
+ obj["ReceiveFrequency"] = "Half-Yearly"
+ obj["ReceiveDaycount"] = "30/360"
+ obj["PayFrequency"] = "Quarterly"
+ obj["PayDaycount"] = "ACT/360"
+ obj["ReceiveFixedRate"] = obj["strike"]
+ obj["ReceiveLegRateType"] = "Fixed"
+ obj["PayLegRateType"] = "Float"
+ obj["PayFloatRate"] = "US0003M"
else:
- obj['ReceiveFrequency'] = 'Quarterly'
- obj['ReceiveDaycount'] = 'ACT/360'
- obj['PayFrequency'] = 'Half-Yearly'
- obj['PayDaycount'] = '30/360'
- obj['ReceiveFloatRate'] = 'US0003M'
- obj['ReceiveLegRateType'] = 'Float'
- obj['PayLegRateType'] = 'Fixed'
- obj['PayFixedRate'] = obj['strike']
+ obj["ReceiveFrequency"] = "Quarterly"
+ obj["ReceiveDaycount"] = "ACT/360"
+ obj["PayFrequency"] = "Half-Yearly"
+ obj["PayDaycount"] = "30/360"
+ obj["ReceiveFloatRate"] = "US0003M"
+ obj["ReceiveLegRateType"] = "Float"
+ obj["PayLegRateType"] = "Fixed"
+ obj["PayFixedRate"] = obj["strike"]
else:
- raise ValueError("'SwapType' needs to be one of 'CD_INDEX_OPTION' or 'SWAPTION'")
+ raise ValueError(
+ "'SwapType' needs to be one of 'CD_INDEX_OPTION' or 'SWAPTION'"
+ )
- obj['PremiumCurrency'] = obj['Currency']
- if obj['InitialMarginPercentage']:
- obj['InitialMarginCurrency'] = obj['Currency']
- obj['UnderlyingInstrument'] = obj.pop('UnderlyingSecurityId')
- if obj['SwapType'] == 'CD_INDEX_OPTION':
- obj['Strike'] = obj.pop('strike')
+ obj["PremiumCurrency"] = obj["Currency"]
+ if obj["InitialMarginPercentage"]:
+ obj["InitialMarginCurrency"] = obj["Currency"]
+ obj["UnderlyingInstrument"] = obj.pop("UnderlyingSecurityId")
+ if obj["SwapType"] == "CD_INDEX_OPTION":
+ obj["Strike"] = obj.pop("strike")
- elif trade_type == 'cds':
- freq = {4: 'Quarterly', 12: 'Monthly'}
- obj['Deal Type'] = 'CreditDefaultSwapDeal'
- obj['PaymentFrequency'] = freq[obj['frequency']]
- obj['InitialMarginPercentage'] = obj.pop('initial_margin_percentage')
- if obj['InitialMarginPercentage']:
- obj['InitialMarginCurrency'] = obj['Currency']
- if obj.get('AttachmentPoint') is None:
- obj['Executing Broker'] = obj['Counterparty']
- if obj['account_code'] == 'BAML':
- obj['Counterparty'] = 'BAMSNY'
- obj['Custodian'] = 'BOMLCM'
- elif obj['account_code'] == 'WF':
- obj['Counterparty'] = 'WELFEI'
- obj['Custodian'] = 'WELLSFCM'
- if obj['Clearing Facility'] is None:
- obj['Clearing Facility'] = 'NOT CLEARED'
+ elif trade_type == "cds":
+ freq = {4: "Quarterly", 12: "Monthly"}
+ obj["Deal Type"] = "CreditDefaultSwapDeal"
+ obj["PaymentFrequency"] = freq[obj["frequency"]]
+ obj["InitialMarginPercentage"] = obj.pop("initial_margin_percentage")
+ if obj["InitialMarginPercentage"]:
+ obj["InitialMarginCurrency"] = obj["Currency"]
+ if obj.get("AttachmentPoint") is None:
+ obj["Executing Broker"] = obj["Counterparty"]
+ if obj["account_code"] == "BAML":
+ obj["Counterparty"] = "BAMSNY"
+ obj["Custodian"] = "BOMLCM"
+ elif obj["account_code"] == "WF":
+ obj["Counterparty"] = "WELFEI"
+ obj["Custodian"] = "WELLSFCM"
+ if obj["Clearing Facility"] is None:
+ obj["Clearing Facility"] = "NOT CLEARED"
- elif trade_type == 'future':
- obj['Deal Type'] = 'FutureDeal'
- rename_keys(obj, {'currency': 'Trade Currency',
- 'commission': 'Commission',
- 'quantity': 'Quantity',
- 'swap_type': 'Swap Type',
- 'bbg_ticker': 'Bloomberg Ticker',
- 'Currency': 'Trade Currency',
- 'exchange': 'Exchange'})
- elif trade_type == 'wire':
- obj['Deal Type'] = 'CashFlowDeal'
- obj['Transaction Type'] = 'Transfer'
- obj['Instrument Type'] = 'Cashflow'
- obj['Settlement Date'] = obj['Trade Date']
- rename_keys(obj, {'amount': 'Amount'})
- elif trade_type == 'spot':
- obj['Deal Type'] = 'SpotDeal'
- rename_keys(obj, {'commission': 'Commission',
- 'commission_currency': 'Commission Currency',
- 'sell_currency': 'Sell Currency',
- 'sell_amount': 'Sell Amount',
- 'buy_currency': 'Buy Currency',
- 'buy_amount': 'Buy Amount',
- 'spot_rate': 'Spot Rate'})
+ elif trade_type == "future":
+ obj["Deal Type"] = "FutureDeal"
+ rename_keys(
+ obj,
+ {
+ "currency": "Trade Currency",
+ "commission": "Commission",
+ "quantity": "Quantity",
+ "swap_type": "Swap Type",
+ "bbg_ticker": "Bloomberg Ticker",
+ "Currency": "Trade Currency",
+ "exchange": "Exchange",
+ },
+ )
+ elif trade_type == "wire":
+ obj["Deal Type"] = "CashFlowDeal"
+ obj["Transaction Type"] = "Transfer"
+ obj["Instrument Type"] = "Cashflow"
+ obj["Settlement Date"] = obj["Trade Date"]
+ rename_keys(obj, {"amount": "Amount"})
+ elif trade_type == "spot":
+ obj["Deal Type"] = "SpotDeal"
+ rename_keys(
+ obj,
+ {
+ "commission": "Commission",
+ "commission_currency": "Commission Currency",
+ "sell_currency": "Sell Currency",
+ "sell_amount": "Sell Amount",
+ "buy_currency": "Buy Currency",
+ "buy_amount": "Buy Amount",
+ "spot_rate": "Spot Rate",
+ },
+ )
return [obj.get(h, None) for h in HEADERS[trade_type]]
-def get_bbg_data(conn, session, identifier, cusip=None, isin=None,
- settle_date=None, asset_class=None, **kwargs):
+def get_bbg_data(
+ conn,
+ session,
+ identifier,
+ cusip=None,
+ isin=None,
+ settle_date=None,
+ asset_class=None,
+ **kwargs,
+):
fields = ["MTG_FACTOR_SET_DT", "INT_ACC"]
- fields_dict = {'Mtge': ["MTG_FACE_AMT", "START_ACC_DT"],
- 'Corp': ["AMT_ISSUED", "PREV_CPN_DT"]}
+ fields_dict = {
+ "Mtge": ["MTG_FACE_AMT", "START_ACC_DT"],
+ "Corp": ["AMT_ISSUED", "PREV_CPN_DT"],
+ }
with conn.cursor() as c:
- c.execute("SELECT identifier FROM securities WHERE identifier=%s",
- (identifier,))
+ c.execute(
+ "SELECT identifier FROM securities WHERE identifier=%s", (identifier,)
+ )
if not c.fetchone():
- fields += ["MATURITY", "CRNCY", "NAME", "FLOATER", "FLT_SPREAD", "CPN",
- "CPN_FREQ", "FIRST_CPN_DT", "MTG_PAY_DELAY", "DAY_CNT_DES"]
+ fields += [
+ "MATURITY",
+ "CRNCY",
+ "NAME",
+ "FLOATER",
+ "FLT_SPREAD",
+ "CPN",
+ "CPN_FREQ",
+ "FIRST_CPN_DT",
+ "MTG_PAY_DELAY",
+ "DAY_CNT_DES",
+ ]
cusip_or_isin = cusip or isin
- for bbg_type in ['Mtge', 'Corp']:
- bbg_id = cusip_or_isin + ' ' + bbg_type
- data = retrieve_data(session, [bbg_id], fields + fields_dict[bbg_type],
- overrides={'SETTLE_DT': settle_date} if settle_date else None)
+ for bbg_type in ["Mtge", "Corp"]:
+ bbg_id = cusip_or_isin + " " + bbg_type
+ data = retrieve_data(
+ session,
+ [bbg_id],
+ fields + fields_dict[bbg_type],
+ overrides={"SETTLE_DT": settle_date} if settle_date else None,
+ )
if data[bbg_id]:
break
else:
- logging.error(f'{cusip_or_isin} not in bloomberg')
+ logging.error(f"{cusip_or_isin} not in bloomberg")
return
bbg_data = data[bbg_id]
- if bbg_data.get('MTG_FACTOR_SET_DT', 0) == 0:
- bbg_data['MTG_FACTOR_SET_DT'] = 1
- bbg_data['INT_ACC'] = 0
- if len(fields) > 2: # we don't have the data in the securities table
- sql_fields = ['identifier', 'cusip', 'isin', 'description', 'face_amount',
- 'maturity', 'floater', 'spread', 'coupon', 'frequency',
- 'day_count', 'first_coupon_date', 'pay_delay', 'currency',
- 'bbg_type', 'asset_class', 'start_accrued_date']
+ if bbg_data.get("MTG_FACTOR_SET_DT", 0) == 0:
+ bbg_data["MTG_FACTOR_SET_DT"] = 1
+ bbg_data["INT_ACC"] = 0
+ if len(fields) > 2: # we don't have the data in the securities table
+ sql_fields = [
+ "identifier",
+ "cusip",
+ "isin",
+ "description",
+ "face_amount",
+ "maturity",
+ "floater",
+ "spread",
+ "coupon",
+ "frequency",
+ "day_count",
+ "first_coupon_date",
+ "pay_delay",
+ "currency",
+ "bbg_type",
+ "asset_class",
+ "start_accrued_date",
+ ]
placeholders = ",".join(["%s"] * len(sql_fields))
columns = ",".join(sql_fields)
sqlstr = f"INSERT INTO securities({columns}) VALUES({placeholders})"
- isfloater = bbg_data['FLOATER'] == 'Y'
- pay_delay = bbg_data.get('MTG_PAY_DELAY', 0)
- day_count = bbg_data.get('DAY_CNT_DES')
+ isfloater = bbg_data["FLOATER"] == "Y"
+ pay_delay = bbg_data.get("MTG_PAY_DELAY", 0)
+ day_count = bbg_data.get("DAY_CNT_DES")
m = re.match("[^(\s]+", day_count)
if m:
day_count = m.group(0)
if isinstance(pay_delay, str):
- pay_delay = int(pay_delay.split(' ')[0])
+ pay_delay = int(pay_delay.split(" ")[0])
with conn.cursor() as c:
- c.execute(sqlstr, (identifier, cusip, isin, bbg_data['NAME'],
- bbg_data.get('MTG_FACE_AMT') or bbg_data.get('AMT_ISSUED'),
- bbg_data.get('MATURITY'), isfloater,
- bbg_data.get('FLT_SPREAD') if isfloater else None,
- bbg_data.get('CPN') if not isfloater else None,
- bbg_data.get('CPN_FREQ'), day_count,
- bbg_data.get('FIRST_CPN_DT'), pay_delay,
- bbg_data.get('CRNCY'), bbg_type, asset_class,
- bbg_data.get('START_ACC_DT') or bbg_data.get('PREV_CPN_DT')))
+ c.execute(
+ sqlstr,
+ (
+ identifier,
+ cusip,
+ isin,
+ bbg_data["NAME"],
+ bbg_data.get("MTG_FACE_AMT") or bbg_data.get("AMT_ISSUED"),
+ bbg_data.get("MATURITY"),
+ isfloater,
+ bbg_data.get("FLT_SPREAD") if isfloater else None,
+ bbg_data.get("CPN") if not isfloater else None,
+ bbg_data.get("CPN_FREQ"),
+ day_count,
+ bbg_data.get("FIRST_CPN_DT"),
+ pay_delay,
+ bbg_data.get("CRNCY"),
+ bbg_type,
+ asset_class,
+ bbg_data.get("START_ACC_DT") or bbg_data.get("PREV_CPN_DT"),
+ ),
+ )
conn.commit()
return bbg_data
+
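
get_bbg_data above relies on Python's for/else: it tries the "Mtge" then the "Corp" ticker suffix, breaks at the first suffix that returns data, and only if the loop never breaks does the else branch log the failure. A sketch with a fake lookup function (fake_lookup and the identifier are made up, not the real retrieve_data call):

def fake_lookup(bbg_id):
    return {"NAME": "DUMMY BOND"} if bbg_id.endswith("Corp") else {}

cusip_or_isin = "XS0000000000"          # made-up identifier
for bbg_type in ["Mtge", "Corp"]:
    bbg_id = f"{cusip_or_isin} {bbg_type}"
    data = fake_lookup(bbg_id)
    if data:
        break                            # first suffix that returns data wins
else:
    # runs only if the loop never hit `break`, i.e. neither suffix matched
    raise SystemExit(f"{cusip_or_isin} not in bloomberg")

print(bbg_type, data)                    # -> Corp {'NAME': 'DUMMY BOND'}
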
def bond_trade_process(conn, session, trade):
bbg_data = get_bbg_data(conn, session, **trade)
- currentface = trade['faceamount'] * bbg_data['MTG_FACTOR_SET_DT']
- accrued_payment = bbg_data['INT_ACC'] * currentface /100.
- principal_payment = currentface * trade['price'] / 100.
+ currentface = trade["faceamount"] * bbg_data["MTG_FACTOR_SET_DT"]
+ accrued_payment = bbg_data["INT_ACC"] * currentface / 100.0
+ principal_payment = currentface * trade["price"] / 100.0
with conn:
with conn.cursor() as c:
- c.execute("UPDATE bonds SET principal_payment = %s, accrued_payment = %s "
- "WHERE id = %s", (principal_payment, accrued_payment, int(trade['id'])))
+ c.execute(
+ "UPDATE bonds SET principal_payment = %s, accrued_payment = %s "
+ "WHERE id = %s",
+ (principal_payment, accrued_payment, int(trade["id"])),
+ )
# mark it at buy price
- if trade['buysell']:
+ if trade["buysell"]:
sqlstr = "INSERT INTO marks VALUES(%s, %s, %s) ON CONFLICT DO NOTHING"
with conn:
with conn.cursor() as c:
- c.execute(sqlstr, (trade['trade_date'], trade['identifier'], trade['price']))
+ c.execute(
+ sqlstr, (trade["trade_date"], trade["identifier"], trade["price"])
+ )
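
Worked numbers for the payment arithmetic in bond_trade_process (values are illustrative, not from a real trade): the pool factor scales the original face, accrued is quoted per 100 of current face, and the principal payment is current face times price over 100.

faceamount = 1_000_000           # original face
factor = 0.85                    # MTG_FACTOR_SET_DT, current pool factor
int_acc = 0.3                    # INT_ACC, accrued per 100 of current face
price = 97.0

currentface = faceamount * factor                 # 850_000.0
accrued_payment = int_acc * currentface / 100.0   # 2_550.0
principal_payment = currentface * price / 100.0   # 824_500.0
print(currentface, accrued_payment, principal_payment)
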
def send_email(trade):
# send out email with trade content
email = GmailMessage()
email.set_content(print_trade(trade))
- email['To'] = 'nyops@lmcg.com'
- email['Subject'] = email_subject(trade)
+ email["To"] = "nyops@lmcg.com"
+ email["Subject"] = email_subject(trade)
email.send()
def is_tranche_trade(trade):
- return trade['swap_type'] in ('CD_INDEX_TRANCHE', 'BESPOKE')
+ return trade["swap_type"] in ("CD_INDEX_TRANCHE", "BESPOKE")
def cds_trade_process(conn, session, trade):
- sqlstr = ("SELECT indexfactor/100 FROM index_version "
- "WHERE redindexcode=%(security_id)s")
+ sqlstr = (
+ "SELECT indexfactor/100 FROM index_version "
+ "WHERE redindexcode=%(security_id)s"
+ )
try:
with conn.cursor() as c:
c.execute(sqlstr, trade)
factor, = c.fetchone()
except ValueError:
- bbg_data = get_bbg_data(conn, session, trade['security_id'],
- isin=trade['security_id'],
- asset_class='Subprime')
+ bbg_data = get_bbg_data(
+ conn,
+ session,
+ trade["security_id"],
+ isin=trade["security_id"],
+ asset_class="Subprime",
+ )
- factor = bbg_data['MTG_FACTOR_SET_DT']
+ factor = bbg_data["MTG_FACTOR_SET_DT"]
if is_tranche_trade(trade):
- tranche_factor = ((trade['attach'] - trade['detach']) /
- (trade['orig_attach'] - trade['orig_detach']))
- trade['curr_notional'] = trade['notional'] * tranche_factor
+ tranche_factor = (trade["attach"] - trade["detach"]) / (
+ trade["orig_attach"] - trade["orig_detach"]
+ )
+ trade["curr_notional"] = trade["notional"] * tranche_factor
else:
- trade['curr_notional'] = trade['notional'] * factor
+ trade["curr_notional"] = trade["notional"] * factor
return trade
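
A worked example of the tranche factor used above, with made-up attachment points: a tranche that originally covered 3 to 7 points of losses and now effectively covers 0 to 3 points has kept 3 of its original 4 points of width, so the booked notional is scaled to 75%.

orig_attach, orig_detach = 3, 7     # original attachment/exhaustion, in points
attach, detach = 0, 3               # current attachment/exhaustion, in points
notional = 10_000_000

tranche_factor = (attach - detach) / (orig_attach - orig_detach)  # 0.75
curr_notional = notional * tranche_factor                          # 7_500_000.0
print(tranche_factor, curr_notional)
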
@@ -425,59 +717,73 @@ def generate_csv(l, trade_type="bond", fund="SERCGMAST"):
return output.getvalue().encode()
-def get_filepath(base_dir: pathlib.Path, trade_type: str,
- fund: str) -> pathlib.Path:
- d = {'bond': 'Mortgages',
- 'cds': 'CreditDefaultSwapDeal',
- 'swaption': 'SwaptionDeal',
- 'future': 'Future',
- 'wire': 'CashFlowDeal',
- 'spot': 'SpotDeal',
- 'capfloor': 'TODO'}
+def get_filepath(base_dir: pathlib.Path, trade_type: str, fund: str) -> pathlib.Path:
+ d = {
+ "bond": "Mortgages",
+ "cds": "CreditDefaultSwapDeal",
+ "swaption": "SwaptionDeal",
+ "future": "Future",
+ "wire": "CashFlowDeal",
+ "spot": "SpotDeal",
+ "capfloor": "TODO",
+ }
timestamp = datetime.datetime.now()
if fund == "BRINKER":
- return (base_dir / str(timestamp.date()) /
- f"LMCG_BBH_SWAP_TRADES_P.{timestamp:%Y%m%d%H%M%S}.csv")
+ return (
+ base_dir
+ / str(timestamp.date())
+ / f"LMCG_BBH_SWAP_TRADES_P.{timestamp:%Y%m%d%H%M%S}.csv"
+ )
else:
- return (base_dir / str(timestamp.date()) /
- f'Serenitas.ALL.{timestamp:%Y%m%d.%H%M%S}.{d[trade_type]}.csv')
+ return (
+ base_dir
+ / str(timestamp.date())
+ / f"Serenitas.ALL.{timestamp:%Y%m%d.%H%M%S}.{d[trade_type]}.csv"
+ )
def upload_file(file_path: pathlib.Path) -> None:
if "BBH" in file_path.name:
return
- ftp = FTP('ftp.globeop.com')
- ftp.login('srntsftp', config.ftp_password)
- ftp.cwd('incoming')
+ ftp = FTP("ftp.globeop.com")
+ ftp.login("srntsftp", config.ftp_password)
+ ftp.cwd("incoming")
cmd = f"STOR {file_path.name}"
with file_path.open("rb") as fh:
- ftp.storbinary(cmd, fh)
+ ftp.storbinary(cmd, fh)
-def write_buffer(buf: bytes, base_dir: pathlib.Path,
- trade_type: str = "bond",
- fund: str = "SERCGMAST"):
+def write_buffer(
+ buf: bytes,
+ base_dir: pathlib.Path,
+ trade_type: str = "bond",
+ fund: str = "SERCGMAST",
+):
file_path = get_filepath(base_dir, trade_type, fund)
file_path.write_bytes(buf)
return file_path
def email_subject(trade):
- return "[{0}] {1} {2} {3}".format(trade['asset_class'], trade['action'],
- "Buy" if trade['buysell'] else "Sell",
- trade['description'])
+ return "[{0}] {1} {2} {3}".format(
+ trade["asset_class"],
+ trade["action"],
+ "Buy" if trade["buysell"] else "Sell",
+ trade["description"],
+ )
def print_trade(trade):
d = trade.copy()
- d['buysell'] = "Buy" if d["buysell"] else "Sell"
+ d["buysell"] = "Buy" if d["buysell"] else "Sell"
return tabulate((k, v) for k, v in d.items())
if __name__ == "__main__":
parser = argparse.ArgumentParser()
- parser.add_argument("-n", "--no-upload", action="store_true",
- help="do not upload to Globeop")
+ parser.add_argument(
+ "-n", "--no-upload", action="store_true", help="do not upload to Globeop"
+ )
parser.add_argument("fund", nargs="?", default="SERCGMAST")
args = parser.parse_args()
q = get_redis_queue()
@@ -486,11 +792,11 @@ if __name__ == "__main__":
except KeyError:
sys.exit("Please set path of daily directory in 'DAILY_DIR'")
- dawndb = dbconn('dawndb')
- for trade_type in ['bond', 'cds', 'swaption', 'future', 'wire', 'spot', 'capfloor']:
+ dawndb = dbconn("dawndb")
+ for trade_type in ["bond", "cds", "swaption", "future", "wire", "spot", "capfloor"]:
list_trades = get_trades(q, trade_type, args.fund)
if list_trades:
- if trade_type in ['bond', 'cds']:
+ if trade_type in ["bond", "cds"]:
process_fun = globals()[f"{trade_type}_trade_process"]
with init_bbg_session(BBG_IP) as session:
for trade in list_trades:
@@ -503,9 +809,10 @@ if __name__ == "__main__":
try:
buf = generate_csv(
filter(lambda t: t.get("upload", True), list_trades),
- trade_type, args.fund)
- file_path = write_buffer(buf, DAILY_DIR,
- trade_type, args.fund)
+ trade_type,
+ args.fund,
+ )
+ file_path = write_buffer(buf, DAILY_DIR, trade_type, args.fund)
if not args.no_upload:
upload_file(file_path)
except IOError:
diff --git a/python/sftp.py b/python/sftp.py
deleted file mode 100644
index 14d2ee68..00000000
--- a/python/sftp.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import paramiko
-import os
-
-hostname="alias.im"
-port=22
-username="guillaume"
-paramiko.util.log_to_file('sftp.log')
-
-t = paramiko.Transport((hostname, port))
-keys = paramiko.Agent().get_keys()
-rsakeys = [key for key in keys if key.name=="ssh-rsa"]
-gkey = rsakeys[0]
-host_keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
-hostkey = host_keys[hostname]['ssh-rsa']
-t.connect(username="guillaume", hostkey=hostkey, pkey=gkey)
-sftp = paramiko.SFTPClient.from_transport(t)
-dirlist = sftp.listdir('.')
-print("Dirlist:", dirlist)
-t.close()
diff --git a/python/swaption_quotes.py b/python/swaption_quotes.py
index 27a80b7a..83ecb427 100644
--- a/python/swaption_quotes.py
+++ b/python/swaption_quotes.py
@@ -5,14 +5,22 @@ from db import serenitas_pool
from statistics import median
-def get_refids(conn, index, series, expiry, value_date=datetime.date.today(),
- sources=["GS", "MS", "CITI"]):
- sql_str = ("SELECT ref_id, ref, quotedate FROM swaption_ref_quotes "
- "WHERE quotedate::date=%s "
- " AND quote_source=%s "
- " AND index=%s AND series=%s"
- " AND expiry=%s "
- "ORDER BY quotedate DESC LIMIT 1")
+def get_refids(
+ conn,
+ index,
+ series,
+ expiry,
+ value_date=datetime.date.today(),
+ sources=["GS", "MS", "CITI"],
+):
+ sql_str = (
+ "SELECT ref_id, ref, quotedate FROM swaption_ref_quotes "
+ "WHERE quotedate::date=%s "
+ " AND quote_source=%s "
+ " AND index=%s AND series=%s"
+ " AND expiry=%s "
+ "ORDER BY quotedate DESC LIMIT 1"
+ )
d = {}
with conn.cursor() as c:
for s in sources:
@@ -20,57 +28,70 @@ def get_refids(conn, index, series, expiry, value_date=datetime.date.today(),
d[s] = c.fetchone()
return d
-def adjust_stacks(index_type, series, expiry,
- value_date=datetime.date.today(),
- sources=["GS", "MS", "CITI"], common_ref=None):
+
+def adjust_stacks(
+ index_type,
+ series,
+ expiry,
+ value_date=datetime.date.today(),
+ sources=["GS", "MS", "CITI"],
+ common_ref=None,
+):
conn = serenitas_pool.getconn()
d = get_refids(conn, index_type, series, expiry, value_date, sources)
if all(v is None for v in d.values()):
raise ValueError("no quotes")
if common_ref is None:
common_ref = median(v[1] for v in d.values())
- index = CreditIndex(index_type, series, "5yr", value_date=value_date,
- notional=10000.)
+ index = CreditIndex(
+ index_type, series, "5yr", value_date=value_date, notional=10000.0
+ )
index.ref = common_ref
old_pv = index.pv
quotes = {}
for s, (ref_id, ref, _) in d.items():
index.ref = ref
dindex_pv = index.pv - old_pv
- df = pd.read_sql_query("SELECT strike, pay_bid, pay_offer, delta_pay, "
- "rec_bid, rec_offer, delta_rec FROM swaption_quotes "
- "WHERE ref_id=%s ORDER BY strike",
- conn,
- params=(ref_id,),
- index_col=['strike'])
+ df = pd.read_sql_query(
+ "SELECT strike, pay_bid, pay_offer, delta_pay, "
+ "rec_bid, rec_offer, delta_rec FROM swaption_quotes "
+ "WHERE ref_id=%s ORDER BY strike",
+ conn,
+ params=(ref_id,),
+ index_col=["strike"],
+ )
if s == "GS":
- df['delta_rec'] = 1 - df['delta_pay']
+ df["delta_rec"] = 1 - df["delta_pay"]
if index_type == "HY":
- df[['pay_bid', 'pay_offer', 'rec_bid', 'rec_offer']] *=100
+ df[["pay_bid", "pay_offer", "rec_bid", "rec_offer"]] *= 100
if s == "CITI":
- df['delta_rec'] *= -1
- if dindex_pv != 0.:
- df[['pay_bid', 'pay_offer']] = df[['pay_bid', 'pay_offer']].sub(
- df.delta_pay * dindex_pv, axis=0)
- df[['rec_bid', 'rec_offer']] = df[['rec_bid', 'rec_offer']].add(
- df.delta_rec * dindex_pv, axis=0)
+ df["delta_rec"] *= -1
+ if dindex_pv != 0.0:
+ df[["pay_bid", "pay_offer"]] = df[["pay_bid", "pay_offer"]].sub(
+ df.delta_pay * dindex_pv, axis=0
+ )
+ df[["rec_bid", "rec_offer"]] = df[["rec_bid", "rec_offer"]].add(
+ df.delta_rec * dindex_pv, axis=0
+ )
quotes[s] = df
- quotes = pd.concat(quotes, names=['source'])
- quotes = quotes.swaplevel('source', 'strike').sort_index()
- inside_quotes = pd.concat([
- quotes[['pay_bid', 'rec_bid']].groupby(level='strike').max(),
- quotes[['pay_offer', 'rec_offer']].groupby(level='strike').min()],
- axis=1
+ quotes = pd.concat(quotes, names=["source"])
+ quotes = quotes.swaplevel("source", "strike").sort_index()
+ inside_quotes = pd.concat(
+ [
+ quotes[["pay_bid", "rec_bid"]].groupby(level="strike").max(),
+ quotes[["pay_offer", "rec_offer"]].groupby(level="strike").min(),
+ ],
+ axis=1,
).sort_index(axis=1)
- quotes = quotes.unstack('source')
+ quotes = quotes.unstack("source")
d = {}
- for k in ['pay_bid', 'rec_bid']:
- #quotes[k].style.apply(highlight_max, axis=1)
+ for k in ["pay_bid", "rec_bid"]:
+ # quotes[k].style.apply(highlight_max, axis=1)
df = pd.concat([quotes[k], inside_quotes[k]], axis=1)
- d[k] = df.rename(columns={k: 'Best'})
- for k in ['pay_offer', 'rec_offer']:
- #quotes[k].style.apply(highlight_min, axis=1)
+ d[k] = df.rename(columns={k: "Best"})
+ for k in ["pay_offer", "rec_offer"]:
+ # quotes[k].style.apply(highlight_min, axis=1)
df = pd.concat([inside_quotes[k], quotes[k]], axis=1)
- d[k] = df.rename(columns={k: 'Best'})
+ d[k] = df.rename(columns={k: "Best"})
serenitas_pool.putconn(conn)
return common_ref, pd.concat(d, axis=1)
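
A toy restatement of one dealer's quote stack at the common ref, mirroring the .sub/.add adjustment in adjust_stacks (all numbers are made up, not real quotes): d_pv is the change in index PV between the dealer's ref and the common ref, payer premiums move against it and receiver premiums with it, each scaled by the quoted delta.

d_pv = 0.15                 # index PV(dealer ref) - PV(common ref), in points
quote = {"pay_bid": 1.20, "pay_offer": 1.40, "delta_pay": 0.45,
         "rec_bid": 0.80, "rec_offer": 0.95, "delta_rec": 0.55}

adjusted = dict(quote)
for k in ("pay_bid", "pay_offer"):
    adjusted[k] = quote[k] - quote["delta_pay"] * d_pv   # payers cheapen
for k in ("rec_bid", "rec_offer"):
    adjusted[k] = quote[k] + quote["delta_rec"] * d_pv   # receivers richen
print(adjusted)
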
diff --git a/python/task_runner.py b/python/task_runner.py
index 52bc1e98..ad2fb817 100644
--- a/python/task_runner.py
+++ b/python/task_runner.py
@@ -6,15 +6,16 @@ from common import get_redis_queue
from subprocess import CalledProcessError
from tasks import Rpc
+
def run():
- ET = dbconn('etdb')
+ ET = dbconn("etdb")
q = get_redis_queue()
notify("READY=1")
while True:
- rpc = Rpc.from_json(q.blpop("tasks")[1].decode('utf-8'))
+ rpc = Rpc.from_json(q.blpop("tasks")[1].decode("utf-8"))
print("Running '{}' with {}".format(rpc.fun, rpc.args))
- if rpc.fun == 'generate_scenarios':
+ if rpc.fun == "generate_scenarios":
rpc.args += [ET]
try:
rpc()
@@ -23,13 +24,14 @@ def run():
else:
print("'{}' completed".format(rpc.fun))
if rpc.fun == "build_portfolios":
- q.rpush("tasks", str(Rpc('build_scenarios', rpc.args)))
+ q.rpush("tasks", str(Rpc("build_scenarios", rpc.args)))
if rpc.fun == "build_scenarios":
- q.rpush("tasks", str(Rpc('generate_scenarios', rpc.args[:-1])))
+ q.rpush("tasks", str(Rpc("generate_scenarios", rpc.args[:-1])))
ET.close()
-if __name__=="__main__":
- logger = logging.getLogger('intex')
+
+if __name__ == "__main__":
+ logger = logging.getLogger("intex")
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
run()
diff --git a/python/test_hyoption.py b/python/test_hyoption.py
deleted file mode 100644
index b6d01ab0..00000000
--- a/python/test_hyoption.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from analytics import Index, Swaption
-import datetime
-index = Index.from_name("hy", 27, "5yr",
- trade_date = datetime.date(2016, 11, 16))
-index.price = 103.75
-exercise_date = datetime.date(2017, 3, 15)
-strike = 102.5
-payer = Swaption(index, exercise_date, strike, strike_is_price = True)
-payer.sigma = .4
-payer.notional= 1e7
-#payer.pv = 1.948 * 1e-2
-
-receiver = Swaption(index, exercise_date, strike, "receiver", strike_is_price = True)
-receiver.sigma = .4
-receiver.notional= 1e7
-