about summary refs log tree commit diff stats
path: root/python/import_quotes.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/import_quotes.py')
-rw-r--r-- python/import_quotes.py 100
1 file changed, 37 insertions, 63 deletions
diff --git a/python/import_quotes.py b/python/import_quotes.py
index 866ac54b..e87e8a6c 100644
--- a/python/import_quotes.py
+++ b/python/import_quotes.py
@@ -19,34 +19,44 @@ def convert(x):
index_list = ['HY9', 'HY10', 'HY15', 'HY17', 'HY19', 'HY21', 'HY22', 'HY23',
'IG9', 'IG19', 'IG21', 'IG22', 'IG23', 'XO22', 'EU9', 'EU19', 'EU21']
-def doc_clause_from_index(index, date):
- if index[:2] in ['HY', 'IG']:
- if date>=datetime.date(2014, 9, 19):
- return 'XR14'
- else:
- return 'XR'
- else:
- if date>=datetime.date(2014, 9, 19):
- return 'MM14'
- else:
- return 'MM'
+doc_clause_mapping14 = {'Full Restructuring': 'MM14',
+ 'No Restructuring': 'XR14',
+ 'Modified Modified Restructurin': 'MM14'}
-def get_current_tickers(database, workdate):
- sqlstr = "SELECT markit_ticker, markit_tier, spread, currency, cds_curve from index_members(%s, %s)"
+doc_clause_mapping = {'Full Restructuring': 'MM',
+ 'No Restructuring': 'XR',
+ 'Modified Modified Restructurin': 'MM'}
+
+def get_markit_bbg_mapping(database, basketid_list, workdate):
+ if workdate>=datetime.date(2014, 9, 19):
+ doc_clause_mapping = doc_clause_mapping14
markit_bbg_mapping = {}
all_tickers = set([])
- for index in index_list:
- doc_clause = doc_clause_from_index(index, workdate)
- with database.cursor() as c:
- c.execute(sqlstr, (index, workdate))
- for line in c:
- all_tickers.add((line['markit_ticker'], line['markit_tier']))
- key = (line['markit_ticker'], line['markit_tier'], line['currency'],
- doc_clause, float(line['spread'])/10000)
- markit_bbg_mapping[key] = line['cds_curve']
-
+ with database.cursor() as c:
+ c.execute("SELECT markit_ticker, markit_tier, spread, currency, cds_curve, " \
+ " doc_clause FROM cds_issuers where index_list && %s", (basketid_list,))
+ for line in c:
+ all_tickers.add((line['markit_ticker'], line['markit_tier']))
+ key = (line['markit_ticker'], line['markit_tier'], line['currency'],
+ doc_clause_mapping[line['doc_clause']], float(line['spread'])/10000)
+ markit_bbg_mapping[key] = line['cds_curve']
+ database.commit()
return (all_tickers, markit_bbg_mapping)
+
+def get_basketids(database, index_list, workdate):
+ r = []
+ with database.cursor() as c:
+ for index in index_list:
+ c.execute("SELECT * FROM nameToBasketID(%s, %s)", (index, workdate))
+ r.append(c.fetchone()[0])
+ database.commit()
+ return r
+
+def get_current_tickers(database, workdate):
+ basketid_list = get_basketids(database, index_list, workdate)
+ return get_markit_bbg_mapping(database, basketid_list, workdate)
+
def insert_cds(database, workdate):
all_tickers, markit_bbg_mapping = get_current_tickers(database, workdate)
filename = "cds eod {0}.csv".format(datetime.datetime.strftime(workdate, "%Y%m%d"))
@@ -73,27 +83,6 @@ def insert_cds(database, workdate):
database.commit()
print(all_tickers-tickers_found)
-def insert_cds_single(database, workdate, bbg_tickers, mkt_tuple):
- filename = "cds eod {0}.csv".format(datetime.datetime.strftime(workdate, "%Y%m%d"))
- colnames = ['Upfront'+tenor for tenor in ['6m', '1y', '2y', '3y', '4y', '5y', '7y', '10y']]
- sqlstr = "INSERT INTO cds_quotes(date, curve_ticker, upfrontbid, upfrontask," \
- "runningbid, runningask, source, recovery) VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
- with open(os.path.join(root, "Tranche_data", "CDS", filename)) as fh:
- csvreader = csv.DictReader(fh)
- with database.cursor() as c:
- for line in csvreader:
- if (line['Ticker'], line['Tier'], line['DocClause'], line['RunningCoupon'], line['Ccy']) == \
- mkt_tuple:
- try:
- c.executemany(sqlstr,
- [(workdate, t, convert(line[colnames[i]]), convert(line[colnames[i]]),
- float(line['RunningCoupon'])*10000, float(line['RunningCoupon'])*10000,
- 'MKIT', convert(line['RealRecovery'])/100)
- for i, t in enumerate(bbg_tickers)])
- except psycopg2.IntegrityError:
- database.rollback()
- database.commit()
-
def insert_index(engine, workdate):
basedir = os.path.join(root, 'Tranche_data', 'Composite_reports')
filenames = [os.path.join(basedir, f) for f in os.listdir(basedir) if 'Indices' in f]
@@ -131,27 +120,12 @@ if __name__=="__main__":
workdate = workdate.date()
engine = create_engine('postgresql://serenitas_user:Serenitas1@debian/serenitasdb')
insert_cds(serenitasdb, workdate)
- insert_index(engine, workdate)
- ## backpopulate single ticker
- # company_id = 16632863
- # with serenitasdb.cursor() as c:
- # c.execute("select cds_curve, markit_ticker, markit_tier from cds_issuers where company_id=%s",
- # (company_id,))
- # tup=c.fetchone()
- # bbg_tickers= tup[0]
- # for f in os.listdir(os.path.join(root, "Tranche_data", "CDS")):
- # if f.endswith("csv"):
- # workdate = datetime.datetime.strptime(f.split(" ")[2].split(".")[0], "%Y%m%d")
- # workdate = workdate.date()
- # if workdate >= datetime.date(2014, 9, 19):
- # mkt_tuple = (tup[1], tup[2]) + ('XR14', '0.05', 'USD')
- # else:
- # mkt_tuple = (tup[1], tup[2]) + ('XR', '0.05', 'USD')
- # insert_cds_single(serenitasdb, workdate, bbg_tickers, mkt_tuple)
- # serenitasdb.close()
+ # insert_index(engine, workdate)
+ serenitasdb.close()
+
# for f in os.listdir(os.path.join(root, "Tranche_data", "CDS")):
# if f.endswith("csv"):
# workdate = datetime.datetime.strptime(f.split(" ")[2].split(".")[0], "%Y%m%d")
# workdate = workdate.date()
# insert_cds(serenitasdb, workdate)
- serenitasdb.close()
+ # serenitasdb.close()