import datetime
import lz4.block
from bbg_helpers import init_bbg_session, retrieve_data, BBG_IP
from analytics.utils import tenor_t
from db import dbconn
import pandas as pd

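# Strike offsets away from ATM (presumably in bp) and the Bloomberg letter
# codes used for |offset| in the tickers; the sign is carried by the R/P
# prefix built in ticker() below.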
spreads = [-200, -100, -50, -25, 25, 50, 100, 200]
spread_letter = {200: 'D', 100: 'C', 75: 'G', 50: 'B', 25: 'A'}


def ticker(expiry, tenor, spread, vol_type, source):
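    """Build a Bloomberg vol-cube ticker for one (expiry, tenor, spread) point.

    The absolute spread maps to a letter code, prefixed with "R" for negative
    offsets and "P" for positive ones.  vol_type "V" builds the "US..." ticker
    family and "N" the "USS..." family.  Expiry may be a year count or a
    letter code ("C" = 3m, "F" = 6m, see to_tenor below).
    """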
    if spread < 0:
        spread_mapping = "R" + spread_letter[abs(spread)]
    else:
        spread_mapping = "P" + spread_letter[abs(spread)]
    if vol_type == "V":
        return f"US{spread_mapping}{expiry:0>2}{tenor:0>2} {source} Curncy"
    elif vol_type == "N":
        return f"USS{spread_mapping}{expiry}{tenor:0>2} {source} Curncy"

# Enumerate the full expiry x tenor x spread grid of tickers.
# vol_type: "V" or "N" (normal vol); sources: GFIS, CMPN.
def get_tickers(vol_type="V", source="GFIS"):
    expiry = ["C", "F", 1, 2, 3, 4, 5, 7, 10, 20, 30]
    tenor = [1, 2, 5, 10, 20, 30]
    for s in spreads:
        for e in expiry:
            for t in tenor:
                yield ticker(e, t, s, vol_type, source)

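# Map letter codes back to month tenors ('A' = 1m, 'C' = 3m, 'F' = 6m);
# anything else is a year count.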
def to_tenor(s):
    d = {'A': '1m', 'C': '3m', 'F': '6m'}
    return d.get(s, s + 'yr')

def get_vol_cube(conn, date, source='GFIS'):
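    """Fetch one stored cube for (date, source) and rebuild the DataFrame.

    Cubes are stored as lz4-compressed msgpack.  Note that pd.read_msgpack
    and DataFrame.to_msgpack were removed in pandas 1.0, so this module
    requires an older pandas.
    """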
    with conn.cursor() as c:
        c.execute("SELECT cube FROM swaption_vol_cube WHERE date=%s AND source=%s",
                  (date, source))
        cube, = next(c)
    return pd.read_msgpack(lz4.block.decompress(cube))

if __name__ == "__main__":
    serenitasdb = dbconn('serenitasdb')

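    # Pull the full PX_LAST history for every ticker in the cube, once per
    # pricing source.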
    with init_bbg_session(BBG_IP) as session:
        for source in ["GFIS", "CMPN"]:
            data = retrieve_data(session, list(get_tickers("V", source)),
                                 ['PX_LAST'],
                                 start_date=datetime.date(2013, 1, 1))

            df = pd.concat(data, axis=1)
            df.columns = df.columns.droplevel(1)
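            # Parse each ticker back into (R/P flag, spread letter, expiry,
            # tenor) and convert the letter codes into signed spread offsets.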
            col = df.columns.to_series().str.extract(r"US([RP])([DCGBA])(\d[0-9ACF])(\d{2})")
            pos_dict = {v: k for k, v in spread_letter.items()}
            neg_dict = {v: -k for k, v in spread_letter.items()}
            col.loc[col[0] == 'P', 1] = col.loc[col[0] == 'P', 1].map(pos_dict)
            col.loc[col[0] == 'R', 1] = col.loc[col[0] == 'R', 1].map(neg_dict)


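            # Normalise expiry/tenor codes to '1m'/'3m'/'6m'/'<n>yr' labels and
            # rebuild the columns as a (spread, expiry, tenor) MultiIndex.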
            col[2] = col[2].str.lstrip("0").map(to_tenor).astype(tenor_t)
            col[3] = col[3].str.lstrip("0").map(to_tenor).astype(tenor_t)
            df.columns = pd.MultiIndex.from_arrays([col[1], col[2], col[3]],
                                                   names=['spread', 'expiry', 'tenor'])
            df = df.sort_index(axis=1)
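            # Upsert one compressed cube per date: rows are (spread, expiry),
            # columns are tenors.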
            with serenitasdb.cursor() as c:
                for date, s in df.iterrows():
                    s = s.unstack('tenor')
                    c.execute("INSERT INTO swaption_vol_cube(date, cube, source) "
                              "VALUES(%s, %s, %s) ON CONFLICT (date, source) "
                              "DO UPDATE SET cube = excluded.cube",
                              (date, lz4.block.compress(s.to_msgpack()), source))
                serenitasdb.commit()