diff options
| -rw-r--r-- | python/task_server/globeop.py | 16 |
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/python/task_server/globeop.py b/python/task_server/globeop.py
index a69f4abf..83b8f307 100644
--- a/python/task_server/globeop.py
+++ b/python/task_server/globeop.py
@@ -1,3 +1,4 @@
+import datetime
 import os
 import os.path
 from . import DAILY_DIR
@@ -7,7 +8,6 @@ from . import config
 import re
 import logging
 import sys
-from pathlib import Path
 from sqlalchemy import create_engine
 sys.path.append('..')
 import load_globeop_report
@@ -36,7 +36,7 @@ def key_fun(s):
     if regex:
         KD = pd.datetime.strptime(regex.group(1), "%Y-%m-%d-%H-%M-%S")
     else:
-        regex = re.search("([^.]+\.[^.]+)", s)
+        regex = re.search(r"([^.]+\.[^.]+)", s)
         KD = pd.datetime.strptime(regex.group(1), "%Y%m%d.%H%M%S")
     return (PED, KD)
 
@@ -73,7 +73,7 @@ def convert_to_csv(f):
     df.to_csv(f.parent / f"{name}_Report.csv", index=False)
     f.unlink()
 
-def download_data(workdate):
+def download_data(workdate: datetime.date):
     ftp = get_ftp('outgoing')
     files = ftp.nlst()
     pnlfiles = [filename for filename in files if "csv" in filename and
@@ -121,7 +121,7 @@ def download_data(workdate):
     convert_to_csv(reports_dir / "CDS_Report.xls")
     insert_todb(workdate)
 
-def insert_todb(workdate):
+def insert_todb(workdate: datetime.date):
     reports_dir = DAILY_DIR / str(workdate) / "Reports"
     engine = create_engine('postgresql://dawn_user@debian/dawndb')
     for report in ["Valuation", "Pnl", "CDS"]:
@@ -141,7 +141,7 @@ def insert_todb(workdate):
             engine.execute(sql_str, (period_end_date,))
         df.to_sql(table, engine, if_exists='append', index=False)
 
-def upload_bond_marks(engine, workdate):
+def upload_bond_marks(engine, workdate: datetime.datetime):
     df = pd.read_sql_query("SELECT identifier, price from list_marks(%s) "
                            "RIGHT JOIN list_positions(%s, NULL, False) "
                            "USING (identifier)", engine,
@@ -155,10 +155,10 @@ def upload_bond_marks(engine, workdate):
         ftp.storbinary('STOR ' + fullpath.name, fh)
     logger.info("upload bond marks done")
 
-def upload_cds_marks(engine, workdate):
+def upload_cds_marks(engine, workdate: datetime.datetime):
     df = pd.read_sql_query("""SELECT cds.dealid AS "DealID",
 'CREDIT_SWAP' AS "Instrument Type", (a.clean_nav+a.accrued) AS "NPV" from list_abscds_marks(%s) a
-JOIN cds USING (security_id)""", engine, params = (workdate.date(),))
+JOIN cds USING (security_id)""", engine, params=(workdate.date(),))
     fullpath = DAILY_DIR / str(workdate.date()) / f"otcNpv{workdate:%Y%m%d}.csv"
     df.to_csv(fullpath, index=False)
     ftp = get_ftp('incoming')
@@ -166,7 +166,7 @@ JOIN cds USING (security_id)""", engine, params = (workdate.date(),))
         ftp.storbinary('STOR ' + fullpath.name, fh)
     logger.info("upload cds marks done")
 
-def upload_data(engine, workdate):
+def upload_data(engine, workdate: datetime.datetime):
     upload_bond_marks(engine, workdate)
     upload_cds_marks(engine, workdate)
 
