1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
|
from bbg_helpers import init_bbg_session, retrieve_data, BBG_IP
import pandas as pd
from sqlalchemy import create_engine
from pandas.tseries.offsets import BDay
from pandas import bdate_range
import re
import os
import logging
def get_list(engine, workdate=None, asset_class=None, include_unsettled=True):
    """Return the securities universe, indexed by Bloomberg id ("<cusip|isin> <bbg_type>").

    With *workdate* given, positions as of that date come from the
    ``list_positions`` SQL function and cusip/isin are derived from the
    identifier length; otherwise the whole ``securities`` table is loaded
    (which presumably already carries cusip/isin columns — confirm schema).
    """
    if workdate:
        positions = pd.read_sql_query(
            "select identifier, bbg_type from list_positions(%s, %s, %s)",
            engine,
            params=(workdate.date(), asset_class, include_unsettled))
        # identifiers up to 11 chars start with a 9-char cusip; 12-char ones are ISINs
        has_cusip = positions.identifier.str.len() <= 11
        is_isin = positions.identifier.str.len() == 12
        positions.loc[has_cusip, 'cusip'] = positions.identifier.str.slice(stop=9)
        positions.loc[is_isin, 'isin'] = positions.identifier
    else:
        positions = pd.read_sql_table("securities", engine)
    # fall back to the isin when no cusip is available
    positions['bbg_id'] = positions.cusip.fillna(positions['isin']) + ' ' + positions.bbg_type
    return positions.set_index('bbg_id')
def backpopulate_marks(begin_str='2015-01-15', end_str='2015-07-15'):
    """Backfill the ``position`` table from daily securitiesNpv* mark files.

    Walks /home/share/Daily for ISO-date-named folders whose date is a
    business day in [begin_str, end_str], merges the newest marks file in each
    folder with the positions for that date, and appends the result to the
    ``position`` table.

    NOTE(review): like the original, this relies on a module-level ``engine``
    (created in the __main__ block) — confirm before calling from elsewhere.
    """
    date_dir = re.compile(r"\d{4}-\d{2}-\d{2}")  # raw string: folder names are ISO dates
    daily_folders = (path for (path, _, _) in os.walk('/home/share/Daily')
                     if date_dir.match(os.path.basename(path)))
    business_days = bdate_range(start=begin_str, end=end_str)
    for path in daily_folders:
        date = pd.to_datetime(os.path.basename(path))
        if date not in business_days:
            continue
        marks_files = [f for f in os.listdir(path) if f.startswith("securitiesNpv")]
        if not marks_files:
            continue
        # sort by the ISO-dated suffix: lexicographic descending == newest first
        marks_files.sort(key=lambda name: name[13:], reverse=True)
        marks = pd.read_csv(os.path.join(path, marks_files[0]))
        # BUG FIX: get_list expects (engine, workdate); the original passed the
        # date as the engine argument.
        positions = get_list(engine, date)
        positions = positions.merge(marks, left_on='identifier', right_on='IDENTIFIER')
        positions.drop(['IDENTIFIER', 'last_settle_date'], axis=1, inplace=True)
        positions['date'] = date
        positions.rename(columns={'Price': 'price'}, inplace=True)
        positions.drop_duplicates().to_sql('position', engine,
                                           if_exists='append', index=False)
def update_securities(engine, session):
    """Refresh start_accrued_date and coupon for every security from Bloomberg.

    Corp securities use PREV_CPN_DT and Mtge securities START_ACC_DT as the
    accrual-start field; both use CUR_CPN for the coupon.
    """
    accrual_field = {'Corp': 'PREV_CPN_DT', 'Mtge': 'START_ACC_DT'}
    universe = get_list(engine)
    conn = engine.raw_connection()
    for bbg_type, date_field in accrual_field.items():
        subset = universe[universe.index.str.endswith(bbg_type)]
        bbg_data = retrieve_data(session, subset.index.tolist(), [date_field, 'CUR_CPN'])
        bbg_frame = pd.DataFrame.from_dict(bbg_data, orient='index')
        merged = subset.merge(bbg_frame, left_index=True, right_index=True)
        sql = ("UPDATE securities SET start_accrued_date=%({0})s "
               ",coupon=%(CUR_CPN)s WHERE identifier=%(identifier)s").format(date_field)
        with conn.cursor() as c:
            c.executemany(sql, merged.to_dict('records'))
        conn.commit()
def init_fx(session, engine, startdate):
    """Seed the fx table with EURUSD/CADUSD closing prices from *startdate* on."""
    pairs = ['EURUSD', 'CADUSD']
    tickers = ['{0} Curncy'.format(p) for p in pairs]
    history = retrieve_data(session, tickers, ['PX_LAST'], start_date=startdate)
    # merge the two per-currency frames on their shared date column
    combined = history['EURUSD Curncy'].merge(history['CADUSD Curncy'],
                                              left_on='date', right_on='date')
    combined = combined.rename(columns={'PX_LAST_x': 'eurusd',
                                        'PX_LAST_y': 'cadusd'})
    combined.to_sql('fx', engine, index=False, if_exists='append')
def update_fx(engine, session, currencies):
    """Insert one row of NY fixed closing prices for *currencies* into fx.

    Each pair (e.g. 'EURUSD') becomes a lowercase column of the fx table; the
    row's date is Bloomberg's PX_CLOSE_DT (assumed identical across the pairs
    — TODO confirm that holds across calendars).
    """
    tickers = [c + ' Curncy' for c in currencies]
    data = retrieve_data(session, tickers, ['FIXED_CLOSING_PRICE_NY', 'PX_CLOSE_DT'])
    if not data:
        # ROBUSTNESS: the original raised NameError here (leaked loop variable).
        logging.error("update_fx: no data returned for %s", currencies)
        return
    colnames = ['date']
    values = []
    close_date = None
    for ticker, fields in data.items():
        colnames.append(ticker.split(' ')[0].lower())
        values.append(fields['FIXED_CLOSING_PRICE_NY'])
        # BUG FIX: the original read PX_CLOSE_DT from the loop variable after
        # the loop ended; capture it explicitly instead.
        close_date = fields['PX_CLOSE_DT']
    values = [close_date] + values
    # column names come from our own currency list, not user input; the values
    # themselves are parameterized.
    sqlstr = 'INSERT INTO fx({0}) VALUES({1})'.format(",".join(colnames),
                                                      ",".join(["%s"] * len(values)))
    conn = engine.raw_connection()
    with conn.cursor() as c:
        c.execute(sqlstr, values)
    conn.commit()
def populate_cashflow_history(engine, session, workdate=None):
    """Rebuild the cashflow_history table from Bloomberg history fields.

    For each security in the universe (as of *workdate*, or all securities
    when None), the first available of these field sets is normalised into
    rows of (date, coupon, interest, ...):
      - HIST_CASH_FLOW merged with MTG_HIST_CPN
      - FLT_CPN_HIST (coupon lagged one period)
      - HIST_INTEREST_DISTRIBUTED (coupon from a hand-kept fixed-coupon table;
        securities missing from it are skipped)
    Existing rows for each security are deleted before the insert, and the
    factors_history materialized view is refreshed at the end.
    """
    securities = get_list(engine, workdate)
    conn = engine.raw_connection()
    data = retrieve_data(session, securities.index.tolist(), ['HIST_CASH_FLOW', 'MTG_HIST_CPN',
                                                              'FLT_CPN_HIST', 'HIST_INTEREST_DISTRIBUTED'])
    # coupons Bloomberg does not report for these two securities
    fixed_coupons = {'XS0306416982 Mtge': 7.62,
                     '91927RAD1 Mtge': 6.77}
    for k, v in data.items():
        if 'HIST_CASH_FLOW' in v:
            to_insert = v['HIST_CASH_FLOW'].merge(v['MTG_HIST_CPN'], how='left',
                                                  left_on='Payment Date',
                                                  right_on='Payment Date')
            to_insert.rename(columns={'Coupon_y': 'coupon',
                                      'Interest': 'interest',
                                      'Payment Date': 'date',
                                      'Principal Balance': 'principal_bal',
                                      'Principal Paid': 'principal'}, inplace=True)
            to_insert.drop(['Period Number', 'Coupon_x'], axis=1, inplace=True)
        elif 'FLT_CPN_HIST' in v:
            to_insert = v['FLT_CPN_HIST']
            to_insert.rename(columns={'Coupon Rate': 'coupon',
                                      'Accrual Start Date': 'date'}, inplace=True)
            # presumably the quoted rate applies to the following accrual
            # period, hence the one-period lag — TODO confirm
            to_insert.coupon = to_insert.coupon.shift(1)
        elif 'HIST_INTEREST_DISTRIBUTED' in v:
            to_insert = v['HIST_INTEREST_DISTRIBUTED']
            to_insert.rename(columns={'Interest': 'interest',
                                      'Historical Date': 'date'}, inplace=True)
            if k in fixed_coupons:
                to_insert['coupon'] = fixed_coupons[k]
            else:  # damn you XS0299146992 !
                continue
        else:
            # BUG FIX: the original logged and then fell through, re-inserting
            # the PREVIOUS security's frame under this identifier (or raising
            # NameError on the first iteration).
            logging.error("No cashflows for the given security %s", k)
            continue
        identifier = securities.loc[k, 'identifier']
        to_insert['identifier'] = identifier
        with conn.cursor() as c:
            c.execute("DELETE FROM cashflow_history WHERE identifier=%s", (identifier,))
            conn.commit()
        to_insert.to_sql('cashflow_history', engine, if_exists='append', index=False)
    with conn.cursor() as c:
        c.execute("REFRESH MATERIALIZED VIEW factors_history")
        conn.commit()
if __name__ == "__main__":
    engine = create_engine('postgresql://dawn_user@debian/dawndb')
    # BUG FIX: pd.datetime was deprecated and removed in pandas 2.0;
    # pd.Timestamp is the supported equivalent.
    workdate = pd.Timestamp.today()
    with init_bbg_session(BBG_IP) as session:
        update_securities(engine, session)
        populate_cashflow_history(engine, session, workdate)
        update_fx(engine, session, ['EURUSD', 'CADUSD'])
    # One-off seeding of the fx table, run manually when bootstrapping:
    # with init_bbg_session(BBG_IP) as session:
    #     init_fx(session, engine, pd.Timestamp(2013, 1, 1))
|