Diffstat (limited to 'python/markit')
 python/markit/__main__.py | 46 ++++++++++++++++++++++++++++++++++------------
 python/markit/cds.py      | 14 +++++++++-----
 2 files changed, 43 insertions(+), 17 deletions(-)
diff --git a/python/markit/__main__.py b/python/markit/__main__.py
index 913eba77..c85ef0bc 100644
--- a/python/markit/__main__.py
+++ b/python/markit/__main__.py
@@ -30,6 +30,13 @@ group.add_argument("-r", "--rates", action="store_true", help="download markit I
 parser.add_argument(
     "-i", "--insert-only", action="store_true", help="do not re-download data"
 )
+parser.add_argument(
+    "-s",
+    "--same-day",
+    action="store_true",
+    dest="same_day",
+    help="download same day reports",
+)
 parser.add_argument("workdate", nargs="?", type=datetime.date.fromisoformat)
@@ -42,7 +49,10 @@ historical = True
 if args.workdate is None:
     if not args.rates:
         historical = False
-        workdate = default_date()
+        if not args.same_day:
+            workdate = default_date()
+        else:
+            workdate = datetime.date.today()
     else:
         workdate = datetime.date.today()
 else:
@@ -87,18 +97,30 @@ elif args.cds:
     }
     if not args.insert_only:
         # download_cds_data(payload, workdate, "COMPOSITES")
-        download_cds_data(payload, workdate)
+        if not args.same_day:
+            download_cds_data(payload, workdate)
+        else:
+            pass
+            # not permissioned
+            # payload.update({"type": "fccds", "version": "6"})
+            # download_cds_data(payload, workdate, "SAME_DAY_FC_CDS_NEWYORK_CLOSE")
         payload.update({"type": "CredIndex", "version": 4})
-        download_composite_data(payload, workdate, historical)
-    conn = serenitas_pool.getconn()
-    remove_curves(conn, workdate)
-    insert_cds(conn, workdate)
-    copy_curves_forward(conn, workdate)
-    serenitas_pool.putconn(conn)
-    if not args.insert_only:
-        engine = create_engine("postgresql://serenitas_user@debian/serenitasdb")
-        insert_index(engine, workdate)
-        insert_tranche(engine, workdate)
+        if args.same_day:
+            download_composite_data(
+                payload, workdate, historical, ("SAME_DAY_INDICES",)
+            )
+        else:
+            download_composite_data(payload, workdate, historical)
+    if not args.same_day:
+        conn = serenitas_pool.getconn()
+        remove_curves(conn, workdate)
+        insert_cds(conn, workdate)
+        copy_curves_forward(conn, workdate)
+        serenitas_pool.putconn(conn)
+        if not args.insert_only:
+            engine = create_engine("postgresql://serenitas_user@debian/serenitasdb")
+            insert_index(engine, workdate)
+            insert_tranche(engine, workdate)
 elif args.rates:
     conn = serenitas_pool.getconn()
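The net effect of the __main__.py changes is that --same-day turns the run into a fetch-only mode: the work date becomes today instead of default_date(), the single-name CDS download is skipped (its same-day report is commented out above as not permissioned), composites are requested from the SAME_DAY_INDICES report only, and the whole database stage from remove_curves through insert_tranche now sits under "if not args.same_day" and so does not run. As a reading aid only, the work-date branching condenses to the sketch below; resolve_workdate is a hypothetical name, and default_date() is the module's existing helper, assumed here to return the usual prior work date.

    import datetime

    def resolve_workdate(args, default_date):
        # Condensed form of the branching above: an explicit positional
        # date wins, --rates and --same-day both use today's date, and
        # everything else falls back to the module's default_date().
        if args.workdate is not None:
            return args.workdate
        if args.rates or args.same_day:
            return datetime.date.today()
        return default_date()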
diff --git a/python/markit/cds.py b/python/markit/cds.py
index da2ca5c5..8aecb904 100644
--- a/python/markit/cds.py
+++ b/python/markit/cds.py
@@ -38,18 +38,21 @@ def download_cds_data(payload, workdate, report="FIXED_COUPON"):
                 shutil.copyfileobj(f1, f2)
             f1.close()
     except zipfile.BadZipFile:
-        logger.error(content.getvalue().decode())
+        logger.error(content.getvalue().decode().strip())
 
 
-def download_composite_data(payload, workdate, historical=False):
+def download_composite_data(
+    payload, workdate, historical=False, reports=("COMPOSITES", "TRANCHE_COMPOSITES")
+):
     # if historical, we want to maintain the invariant mtime(f)== payload['date'] + BDay(1)
     if historical:
         ts = (workdate + BDay(1)).timestamp()
-    for report in ["COMPOSITES", "TRANCHE_COMPOSITES"]:
+    for report in reports:
         for family in ["CDX", "ITRAXX-EUROPE"]:
             payload.update({"family": family, "report": report})
-            while True:
+            retry = 0
+            while retry < 10:
                 r = requests.post("https://www.markit.com/export.jsp", params=payload)
                 try:
                     with zipfile.ZipFile(io.BytesIO(r.content)) as z:
@@ -66,8 +69,9 @@ def download_composite_data(payload, workdate, historical=False):
                         if historical:
                             os.utime(path, (ts, ts))
                 except zipfile.BadZipfile:
-                    logger.error(r.content.decode())
+                    logger.error(r.content.decode().strip())
                     time.sleep(5)
+                    retry += 1
                     continue
                 else:
                     break
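In cds.py, download_composite_data gains a reports parameter, which is what lets __main__.py request only ("SAME_DAY_INDICES",) in same-day mode while the default stays ("COMPOSITES", "TRANCHE_COMPOSITES"). The unbounded "while True" retry also becomes a capped loop: at most 10 attempts per report/family pair, sleeping 5 seconds after each response that is not a valid zip, then falling through to the next family instead of spinning forever. A self-contained sketch of that bounded-retry shape, with fetch_zip as a hypothetical callable standing in for the requests.post call:

    import io
    import time
    import zipfile

    def fetch_names_with_retry(fetch_zip, max_retries=10, delay=5):
        # Same shape as the loop above: a response that is not a valid
        # zip sleeps, bumps the counter and retries; a good archive
        # returns immediately; after max_retries failures we give up.
        retry = 0
        while retry < max_retries:
            content = fetch_zip()  # hypothetical: returns raw response bytes
            try:
                with zipfile.ZipFile(io.BytesIO(content)) as z:
                    return z.namelist()
            except zipfile.BadZipFile:
                time.sleep(delay)
                retry += 1
        return None  # all attempts exhausted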