Diffstat (limited to 'python/import_cds_quotes.py')
-rw-r--r--  python/import_cds_quotes.py  102
1 file changed, 0 insertions(+), 102 deletions(-)
diff --git a/python/import_cds_quotes.py b/python/import_cds_quotes.py
deleted file mode 100644
index a5328b21..00000000
--- a/python/import_cds_quotes.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import csv
-import datetime
-import os
-import sys
-
-import psycopg2
-from pandas.tseries.offsets import BDay
-
-from common import root
-from db import serenitasdb
-
-def convert(x):
-    """Drop the trailing character (typically '%') and parse the rest as a
-    float; return None if the field is empty or not numeric."""
-    try:
-        return float(x[:-1])
-    except ValueError:
-        return None
-
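-# Return the set of (markit_ticker, tier) pairs belonging to the listed HY/IG
-# indices on `workdate`, plus a mapping from
-# (markit_ticker, tier, ccy, doc_clause, running_coupon) to the cds_curve
-# ticker.  The doc clause switches from 'XR' to 'XR14' on 2014-09-19; HY
-# series carry a 5% running coupon, IG series 1%.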
-def get_current_tickers(database, workdate):
-    sqlstr = "SELECT markit_ticker, markit_tier, cds_curve from index_members(%s, %s)"
-    markit_bbg_mapping = {}
-    all_tickers = set()
-    if workdate >= datetime.date(2014, 9, 19):
-        doc_clause = 'XR14'
-    else:
-        doc_clause = 'XR'
-    for index in ['HY9', 'HY10', 'HY15', 'HY17', 'HY19', 'HY21', 'IG9', 'IG19', 'IG21']:
-        spread = 0.05 if 'HY' in index else 0.01
-        with database.cursor() as c:
-            c.execute(sqlstr, (index, workdate))
-            for line in c:
-                all_tickers.add((line['markit_ticker'], line['markit_tier']))
-                key = (line['markit_ticker'], line['markit_tier'], 'USD', doc_clause, spread)
-                hykey = key[:-1] + (0.05,)
-                if hykey in markit_bbg_mapping:
-                    del markit_bbg_mapping[hykey]  # we only keep the tightest quote
-                markit_bbg_mapping[key] = line['cds_curve']
-
-    return (all_tickers, markit_bbg_mapping)
-
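-# Import the Markit end-of-day CDS quotes for `workdate`: clear that day's
-# rows from cds_quotes, re-insert one row per tenor for every name found in
-# the "cds eod YYYYMMDD.csv" file, and print the index members for which no
-# quote was found.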
-def insert_cds(database, workdate):
-    all_tickers, markit_bbg_mapping = get_current_tickers(database, workdate)
-    filename = "cds eod {0}.csv".format(workdate.strftime("%Y%m%d"))
-    colnames = ['Upfront' + tenor for tenor in ['6m', '1y', '2y', '3y', '4y', '5y', '7y', '10y']]
-    sqlstr = "INSERT INTO cds_quotes(date, curve_ticker, upfrontbid, upfrontask," \
-             "runningbid, runningask, source, recovery) VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
-
-    tickers_found = set()
-    with database.cursor() as c:
-        c.execute("DELETE from cds_quotes where date=%s", (workdate,))
-    database.commit()
-    with open(os.path.join(root, "Tranche_data", "CDS", filename)) as fh:
-        csvreader = csv.DictReader(fh)
-        with database.cursor() as c:
-            for line in csvreader:
-                tickers_found.add((line['Ticker'], line['Tier']))
-                k = (line['Ticker'], line['Tier'], line['Ccy'], line['DocClause'],
-                     float(line['RunningCoupon']))
-                try:
-                    c.executemany(sqlstr,
-                                  [(workdate, t, convert(line[colnames[i]]), convert(line[colnames[i]]),
-                                    float(line['RunningCoupon']) * 10000, float(line['RunningCoupon']) * 10000,
-                                    'MKIT', convert(line['RealRecovery']) / 100)
-                                   for i, t in enumerate(markit_bbg_mapping[k])])
-                except KeyError:
-                    continue
-    database.commit()
-    print(all_tickers - tickers_found)
-
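-# Back-populate quotes for a single Markit name, identified by `mkt_tuple`
-# (ticker, tier, doc clause, running coupon, currency), onto an explicit list
-# of curve tickers; rows that violate a constraint (e.g. quotes already
-# present for that date) are rolled back rather than inserted.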
-def insert_cds_single(database, workdate, bbgtickers, mkt_tuple):
-    filename = "cds eod {0}.csv".format(workdate.strftime("%Y%m%d"))
-    colnames = ['Upfront' + tenor for tenor in ['6m', '1y', '2y', '3y', '4y', '5y', '7y', '10y']]
-    sqlstr = "INSERT INTO cds_quotes(date, curve_ticker, upfrontbid, upfrontask," \
-             "runningbid, runningask, source, recovery) VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
-    with open(os.path.join(root, "Tranche_data", "CDS", filename)) as fh:
-        csvreader = csv.DictReader(fh)
-        with database.cursor() as c:
-            for line in csvreader:
-                if (line['Ticker'], line['Tier'], line['DocClause'],
-                        line['RunningCoupon'], line['Ccy']) == mkt_tuple:
-                    try:
-                        c.executemany(sqlstr,
-                                      [(workdate, t, convert(line[colnames[i]]), convert(line[colnames[i]]),
-                                        float(line['RunningCoupon']) * 10000, float(line['RunningCoupon']) * 10000,
-                                        'MKIT', convert(line['RealRecovery']) / 100)
-                                       for i, t in enumerate(bbgtickers)])
-                    except psycopg2.IntegrityError:
-                        database.rollback()
-                    else:
-                        database.commit()
-
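-# Usage: python import_cds_quotes.py [YYYY-MM-DD]
-# With no argument, the previous business day is imported.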
-if __name__ == "__main__":
-    if len(sys.argv) >= 2:
-        workdate = datetime.datetime.strptime(sys.argv[1], "%Y-%m-%d")
-    else:
-        workdate = datetime.datetime.today() - BDay(1)
-    workdate = workdate.date()
-    insert_cds(serenitasdb, workdate)
-    ## backpopulate single ticker
-    # bbgtickers = ['CT675194bis', 'CT406651bis', 'CT406655bis', 'CX404662bis', 'CX404666bis',
-    #               'CX404670bis', 'CX404678bis', 'CX404690bis']
-    # mkt_tuple = ('REALOGR', 'SNRFOR', 'XR', "0.05", 'USD')
-    # for f in os.listdir(os.path.join(root, "Tranche_data", "CDS")):
-    #     if f.endswith("csv"):
-    #         workdate = datetime.datetime.strptime(f.split(" ")[2].split(".")[0], "%Y%m%d")
-    #         workdate = workdate.date()
-    #         insert_cds_single(serenitasdb, workdate, bbgtickers, mkt_tuple)