# python/bbg_prices.py
import pandas as pd
from psycopg2.extensions import register_adapter, AsIs
from sqlalchemy import create_engine

from bbg_helpers import init_bbg_session, retrieve_data, BBG_IP

# Adapt pandas NaT (missing timestamps) to SQL NULL rather than the string "None".
register_adapter(type(pd.NaT), lambda nat: AsIs("NULL"))

# SQLAlchemy engine for the ET Postgres database, reused for all queries below.
engine = create_engine("postgresql://et_user@debian/ET")

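# Bloomberg fields to request: issue status, default/call flags and dates,
# plus the latest price, pricing date, margin, amount outstanding, and source.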
fields_update = [
    "LN_ISSUE_STATUS",
    "AMT_OUTSTANDING",
    "PX_LAST",
    "LAST_UPDATE_DT",
    "LN_CURRENT_MARGIN",
    "DEFAULTED",
    "DEFAULT_DATE",
    "CALLED",
    "CALLED_DT",
    "PRICING_SOURCE",
]

# Pull active securities from the reference table: skip anything refinanced,
# retired, replaced, or already called. The Bloomberg unique ID carries a
# two-character prefix, so the CUSIP is everything from position 3 onward.
cusips = pd.read_sql_query(
    "SELECT id_bb_unique, substring(id_bb_unique from 3) AS cusip "
    "FROM bloomberg_corp_ref "
    "WHERE (status is Null or status NOT IN ('REFINANCED','RETIRED', 'REPLACED')) "
    "AND not called",
    engine,
    index_col="cusip",
)

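# Bloomberg security identifiers take the form "<CUSIP> Corp".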
securities = ["{0} Corp".format(cusip) for cusip in cusips.index]

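# Open a Bloomberg session and pull the requested fields for every security.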
with init_bbg_session(BBG_IP) as session:
    data = retrieve_data(session, securities, fields_update)

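# retrieve_data keys its results by security string ("<CUSIP> Corp"); the first
# nine characters are the CUSIP, which maps back to the Bloomberg unique ID.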
df = pd.DataFrame.from_dict(data, orient="index")
df["security"] = df.index.str.slice(0, 9)
df.set_index(["security"], inplace=True)
df["ID_BB_UNIQUE"] = cusips["id_bb_unique"]
df.reset_index(inplace=True)
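# Status and call/default fields used to refresh bloomberg_corp_ref;
# NaN/NaT are replaced with None so they are written as SQL NULL.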
to_insert = df[
    [
        "DEFAULTED",
        "DEFAULT_DATE",
        "CALLED",
        "CALLED_DT",
        "LN_ISSUE_STATUS",
        "ID_BB_UNIQUE",
    ]
]
to_insert = to_insert.where(to_insert.notnull(), None)

# Bulk-update the reference table with the latest status, default, and call data.
conn = engine.raw_connection()
with conn.cursor() as c:
    c.executemany(
        "UPDATE bloomberg_corp_ref SET defaulted = %(DEFAULTED)s, "
        "default_date = %(DEFAULT_DATE)s, called = %(CALLED)s, called_date = %(CALLED_DT)s, "
        "status = %(LN_ISSUE_STATUS)s WHERE id_bb_unique = %(ID_BB_UNIQUE)s",
        to_insert.to_dict("records"),
    )
conn.commit()
conn.close()

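# (id_bb_unique, pricingdate) pairs already stored, so existing rows are not re-inserted.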
currentdata = pd.read_sql_query(
    "SELECT id_bb_unique, pricingdate from bloomberg_corp",
    engine,
    parse_dates=["pricingdate"],
    index_col=["id_bb_unique", "pricingdate"],
)
# No need to insert empty prices: drop rows missing a price or pricing date,
# then keep only (security, date) pairs not already present in bloomberg_corp.
df.dropna(subset=["PX_LAST", "LAST_UPDATE_DT"], inplace=True)
df.set_index(["ID_BB_UNIQUE", "LAST_UPDATE_DT"], inplace=True)
df = df.loc[df.index.difference(currentdata.index)]
df.index.names = ["ID_BB_UNIQUE", "LAST_UPDATE_DT"]
df.reset_index(inplace=True)

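# Rename the Bloomberg fields to the bloomberg_corp column names and append the new rows.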
to_insert = df[
    [
        "ID_BB_UNIQUE",
        "LAST_UPDATE_DT",
        "PX_LAST",
        "LN_CURRENT_MARGIN",
        "AMT_OUTSTANDING",
        "PRICING_SOURCE",
    ]
].rename(
    columns={
        "ID_BB_UNIQUE": "id_bb_unique",
        "LAST_UPDATE_DT": "pricingdate",
        "PX_LAST": "price",
        "LN_CURRENT_MARGIN": "loan_margin",
        "AMT_OUTSTANDING": "amount_outstanding",
        "PRICING_SOURCE": "source",
    }
)
to_insert.to_sql("bloomberg_corp", engine, if_exists="append", index=False)