# python/swaption_pnl.py
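"""Daily P&L for the CDS index and swaption books.

Builds daily PV and cash-flow series for each book over a date range and
prints the combined daily and cumulative P&L.
"""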
import datetime
import pandas as pd
from psycopg2.extensions import connection
from risk.swaptions import get_swaption_portfolio
from risk.indices import get_index_portfolio
from pandas.tseries.offsets import BDay
from pyisda.date import previous_twentieth
from typing import Optional, Tuple


def get_index_pv(
    start_date: datetime.date,
    end_date: datetime.date,
    conn: connection,
    strategies: Optional[Tuple[str, ...]] = None,
):
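    """Daily PV and cash-flow series for the CDS index book.

    For each business day in [start_date, end_date], the index portfolio is
    priced as of the previous business day; upfront fees of trades booked
    that day are accumulated, and the day's cash flows are adjusted for
    coupons paid on index coupon dates.  Returns a DataFrame indexed by
    pricing date with columns "pv" and "daily".
    """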
    dr = pd.bdate_range(start_date, end_date)
    pvs = []
    daily = []
    dates = []

    for d in dr:
        prev_day = (d - BDay()).date()
        # Price the portfolio as of the previous business day.  It must be
        # built before the coupon adjustment below, which iterates its trades.
        portf = get_index_portfolio(prev_day, conn, strategies)
        accrued = 0.0
        if previous_twentieth(d, roll=True) == d.date():
            # On index coupon dates, adjust the day's cash flows for the
            # coupon payment on each trade.
            for t in portf.trades:
                cf_date, amount = t._fee_leg.cashflows[0]
                accrued -= amount * t.notional * t.factor * t.fixed_rate * 1e-4
        nav = 0.0
        # Upfront fees of index trades booked on the pricing date.
        with conn.cursor() as c:
            c.execute(
                "SELECT upfront FROM cds WHERE trade_date=%s AND folder IN %s",
                (prev_day, strategies),
            )
            for (fee,) in c:
                nav += fee
        daily.append(nav + accrued)
        pvs.append(portf.pv)
        dates.append(prev_day)
    df = pd.DataFrame({"pv": pvs, "daily": daily}, index=pd.to_datetime(dates))
    return df


def get_swaption_pv(
    start_date: datetime.date, end_date: datetime.date, conn: connection, **kwargs
):
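    """Daily PV and cash-flow series for the swaption book (folders other than STEEP).

    For each business day in [start_date, end_date], the swaption portfolio
    is priced as of the previous business day; termination fees and premiums
    of new trades booked that day are accumulated.  Extra keyword arguments
    are passed through to get_swaption_portfolio.  Returns a DataFrame
    indexed by pricing date with columns "pv" and "daily".
    """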
    dr = pd.bdate_range(start_date, end_date)
    pv = []
    daily = []
    dates = []
    for d in dr:
        prev_day = (d - BDay()).date()
        portf = get_swaption_portfolio(prev_day, conn, **kwargs)
        nav = 0.0
        # add terminations
        with conn.cursor() as c:
            c.execute(
                "SELECT termination_fee "
                "FROM swaptions WHERE termination_date=%s AND folder != 'STEEP'",
                (prev_day,),
            )
            for (fee,) in c:
                nav += fee
        # add new trades
        with conn.cursor() as c:
            c.execute(
                "SELECT notional * price/100 * (CASE WHEN buysell THEN -1. ELSE 1. END) "
                "FROM swaptions WHERE trade_date=%s AND folder != 'STEEP'",
                (prev_day,),
            )
            for (fee,) in c:
                nav += fee
        dates.append(prev_day)
        pv.append(portf.pv)
        daily.append(nav)
    df = pd.DataFrame({"pv": pv, "daily": daily}, index=pd.to_datetime(dates))
    return df


if __name__ == "__main__":
    import argparse
    from utils.db import dbconn

    dawndb = dbconn("dawndb")
    parser = argparse.ArgumentParser()
    parser.add_argument("start_date", type=datetime.date.fromisoformat)
    parser.add_argument("end_date", type=datetime.date.fromisoformat)
    parser.add_argument(
        "-e",
        "--external",
        action="store_true",
        default=False,
        dest="use_external",
        help="use brokers' marks",
    )
    parser.add_argument(
        "-s",
        "--source",
        action="append",
        default=["GS"],
        dest="source_list",
        help="quote source",
    )
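    # NB: with action="append", values passed via -s/--source are appended to
    # the default, so "-s X" yields ["GS", "X"] rather than replacing "GS".
    # Example invocation (placeholder dates):
    #   python swaption_pnl.py 2024-01-02 2024-03-28 --external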
    args = parser.parse_args()
    df_index = get_index_pv(
        args.start_date, args.end_date, dawndb, ("IGOPTDEL", "HYOPTDEL")
    )
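    # The remaining CLI options (use_external, source_list) are forwarded to
    # get_swaption_portfolio via **kwargs.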
    df_swaption = get_swaption_pv(conn=dawndb, **vars(args))
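    # Daily P&L = day-on-day change in PV plus that day's cash flows.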
    pnl_index = df_index.pv.diff() + df_index.daily
    pnl_swaption = df_swaption.pv.diff() + df_swaption.daily
    pnl = pd.concat([pnl_index, pnl_swaption], keys=["index", "swaption"], axis=1)
    print(
        pd.concat(
            [pnl.sum(axis=1), pnl.sum(axis=1).cumsum()],
            axis=1,
            keys=["daily", "cumulative"],
        )
    )