path: root/python/cds_curve.py
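"""Recalibrate Markit CDS index baskets and store per-tenor risk measures.

For each trade date with index quotes, the basket is re-tweaked to the
quoted levels and the resulting per-tenor duration and theta are written
back to the duration2 and theta2 columns of the index_quotes table.
"""
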
from analytics.basket_index import MarkitBasketIndex
from pyisda.legs import FeeLeg, ContingentLeg
from pyisda.logging import enable_logging

import pandas as pd

from db import dbconn

def all_curves_pv(curves, today_date, jp_yc, start_date, step_in_date, value_date, maturities):
    """Price the coupon and protection legs of each survival curve.

    Returns a DataFrame indexed by ticker with a (field, maturity) column
    MultiIndex, where 'duration' is the clean risky annuity (coupon leg PV
    net of accrued) and 'protection_pv' is the contingent leg PV.
    """
    r = {}
    for d in maturities:
        # Legs with unit coupon and unit notional, running from start_date to maturity d.
        coupon_leg = FeeLeg(start_date, d, True, 1., 1.)
        default_leg = ContingentLeg(start_date, d, True)
        accrued = coupon_leg.accrued(step_in_date)
        tickers = []
        data = []
        for sc in curves:
            coupon_leg_pv = coupon_leg.pv(today_date, step_in_date, value_date, jp_yc, sc, False)
            # 0.4: assumed recovery rate
            default_leg_pv = default_leg.pv(today_date, step_in_date, value_date,
                                            jp_yc, sc, 0.4)
            tickers.append(sc.ticker)
            data.append((coupon_leg_pv - accrued, default_leg_pv))
        r[pd.Timestamp(d)] = pd.DataFrame.from_records(data,
                                                       index=tickers,
                                                       columns=['duration', 'protection_pv'])
    return pd.concat(r, axis=1).swaplevel(axis=1).sort_index(axis=1, level=0)


def calibrate_portfolio(index_type, series, tenors=['3yr', '5yr', '7yr', '10yr'],
                        start_date=None):
    """Recalibrate the index basket on every quoted trade date.

    Returns a DataFrame of duration, theta and tweak per tenor, indexed by
    (trade_date, tenor). If start_date is given, only quotes from that date
    onwards are processed.
    """
    index = MarkitBasketIndex(index_type, series, tenors)
    r = {}
    if start_date:
        # Restrict to quotes on or after start_date.
        index.index_quotes = index.index_quotes[start_date:]
    for trade_date in index.index_quotes.index.get_level_values(0):
        index.trade_date = trade_date
        # Tweak the basket so it reprices the index quotes on this date.
        index.tweak()
        r[trade_date] = pd.DataFrame({'duration': index.duration(),
                                      'theta': index.theta(),
                                      'tweak': index.tweaks}, index=tenors)
    return pd.concat(r)

if __name__ == "__main__":
    enable_logging()
    import argparse
    import sys
    parser = argparse.ArgumentParser()
    parser.add_argument('index', help="index type (IG, HY, EU or XO)")
    parser.add_argument('series', help="series", type=int)
    parser.add_argument('--latest', required=False, action="store_true")
    args = parser.parse_args()
    index, series = args.index, args.series
    conn = dbconn('serenitasdb')

    if args.latest:
        # Resume from the earliest quote date that is still missing results.
        with conn.cursor() as c:
            c.execute("SELECT min(date) FROM index_quotes WHERE index=%s AND series=%s "
                      "AND duration2 IS NULL", (index, series))
            start_date, = c.fetchone()
            if start_date is None:
                # Nothing left to calibrate.
                sys.exit(0)
    else:
        start_date = None

    df = calibrate_portfolio(index, series, ['3yr', '5yr', '7yr', '10yr'],
                             start_date)

    with conn.cursor() as c:
        # df is indexed by (trade_date, tenor); write each row back to the quotes table.
        for (trade_date, tenor), s in df.iterrows():
            c.execute("UPDATE index_quotes SET duration2=%s, theta2=%s "
                      "WHERE date=%s AND tenor=%s AND index=%s AND series=%s",
                      (s.duration, s.theta, trade_date, tenor, index, series))
    conn.commit()
    conn.close()
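
# Example invocation (the series number is illustrative):
#   python cds_curve.py IG 33 --latest
# With --latest, calibration restarts from the earliest date whose duration2
# is still NULL; without it the full quote history for the series is rerun.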