diff options
Diffstat (limited to 'python/markit_cds.py')
-rw-r--r--  python/markit_cds.py | 73
1 files changed, 0 insertions, 73 deletions
diff --git a/python/markit_cds.py b/python/markit_cds.py
deleted file mode 100644
index bb2ba2a0..00000000
--- a/python/markit_cds.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import requests
-import datetime
-from common import root
-import os
-import csv
-import sys
-import zipfile, io
-import shutil
-import pandas as pd
-from pandas.tseries.offsets import BDay
-from db import dbconn
-from import_quotes import insert_cds, insert_index, insert_tranche
-from sqlalchemy import create_engine
-
-def convertToNone(v):
- return v if v else None
-
-def download_cds_data(payload):
- r = requests.get('https://www.markit.com/export.jsp', params=payload)
- f2 = open(os.path.join(root, "Tranche_data", "CDS", "cds eod {0}.csv".format(payload['date'])), "wb")
- with zipfile.ZipFile(io.BytesIO(r.content)) as z:
- for f in z.namelist():
- if "csv" in f:
- f1 = z.open(f)
- next(f1)
- next(f1)
- shutil.copyfileobj(f1, f2)
- f1.close()
- f2.close()
-
-def download_composite_data(payload, historical=False):
- ## if historical, we want to maintain the invariant mtime(f)== payload['date'] + BDay(1)
- if historical:
- ts = datetime.datetime.strptime(payload['date'], "%Y%m%d") + BDay(1)
- ts = ts.timestamp()
- for report in ['COMPOSITES', 'TRANCHE_COMPOSITES']:
- for family in ['CDX', 'ITRAXX-EUROPE']:
- payload.update({'family': family, 'report': report})
- r = requests.get('https://www.markit.com/export.jsp', params=payload)
- try:
- with zipfile.ZipFile(io.BytesIO(r.content)) as z:
- for f in z.namelist():
- if "csv" in f:
- path = z.extract(f, path=os.path.join(root, "Tranche_data", "Composite_reports"))
- if historical:
- os.utime(path, (ts, ts))
- except zipfile.BadZipfile:
- print(r.content)
- continue
-
-if __name__=="__main__":
- if len(sys.argv) > 1:
- workdate = pd.datetime.strptime(sys.argv[1], "%Y-%m-%d")
- historical = True
- else:
- workdate = pd.datetime.today()-BDay(1)
- historical = False
- payload = {'user': 'GuillaumeHorel',
- 'password': 'password',
- 'version': '5',
- 'format': 'csv',
- 'report': 'FIXED_COUPON',
- 'date': workdate.strftime("%Y%m%d"),
- 'type': 'CDS'}
- download_cds_data(payload)
- payload.update({'type':'CredIndex','version':4})
- download_composite_data(payload, historical)
- engine = create_engine('postgresql://serenitas_user@debian/serenitasdb')
- serenitasdb = dbconn('serenitasdb')
- insert_cds(serenitasdb, workdate.date())
- insert_index(engine, workdate.date())
- insert_tranche(engine, workdate.date())
- serenitasdb.close()
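
For reference, the core pattern the deleted script relied on (an HTTP GET against the Markit export endpoint, then pulling the CSV member out of the returned zip) can be sketched in isolation as below. The endpoint URL and the CSV-in-zip handling come from the removed file; the function name fetch_markit_report, the output-directory handling, and the error check are illustrative additions, and running it requires a payload with valid Markit credentials.

import io
import os
import zipfile

import requests


def fetch_markit_report(payload, out_dir):
    """Download one Markit export and extract the CSV member of the zip.

    Sketch only: mirrors the request/extract flow of the removed script,
    with placeholder output handling.
    """
    r = requests.get("https://www.markit.com/export.jsp", params=payload)
    r.raise_for_status()
    os.makedirs(out_dir, exist_ok=True)
    with zipfile.ZipFile(io.BytesIO(r.content)) as z:
        for name in z.namelist():
            if "csv" in name:
                # Extract the CSV member and return its path to the caller.
                return z.extract(name, path=out_dir)
    return None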