Diffstat (limited to 'python/task_server')
-rw-r--r--  python/task_server/globeop.py | 42
1 file changed, 21 insertions(+), 21 deletions(-)
diff --git a/python/task_server/globeop.py b/python/task_server/globeop.py
index ae763b4f..18a0790a 100644
--- a/python/task_server/globeop.py
+++ b/python/task_server/globeop.py
@@ -6,6 +6,7 @@ from task_server import config
 import re
 import logging
 import sys
+from pathlib import Path
 from sqlalchemy import create_engine
 sys.path.append('..')
 import load_globeop_report
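
The rest of the commit swaps os.path/os calls for their pathlib equivalents. As a quick reference (a sketch, not part of the commit; the directory names are made up), the mapping is roughly:

    from pathlib import Path
    import os

    base = Path(os.environ.get('DAILY_DIR', '/tmp'))  # fallback is illustrative
    reports = base / "2020-01-02" / "Reports"         # os.path.join(...)
    reports.mkdir(parents=True, exist_ok=True)        # os.makedirs(...)
    print(reports.exists())                           # os.path.exists(...)
    (reports / "x.csv").write_text("a,b\n")
    (reports / "x.csv").unlink()                      # os.remove(...)
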
@@ -64,12 +65,11 @@ def get_gpg():
     return gpg
 
 def convert_to_csv(f):
-    if os.path.exists(f + ".xls"):
+    if f.exists():
         for sheet in ["Credit Default Swap", "Swaption"]:
-            df = pd.read_excel(f + ".xls", sheet_name=sheet,
-                               skiprows=[0, 1, 2, 3])
-            df.to_csv(f"{sheet}.csv", index=False)
-        os.remove(f + ".xls")
+            df = pd.read_excel(f, sheet_name=sheet, skiprows=[0, 1, 2, 3])
+            df.to_csv(f.parent / f"{sheet}.csv", index=False)
+        f.unlink()
 
 def download_data(workdate):
     ftp = get_ftp('outgoing')
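
pandas can also read both sheets in a single pass: passing a list for sheet_name makes pd.read_excel return a dict of DataFrames keyed by sheet name. A possible variant of convert_to_csv along those lines (a sketch under that API, not the committed code):

    import pandas as pd
    from pathlib import Path

    def convert_to_csv(f: Path) -> None:
        # sheet_name as a list -> {sheet name: DataFrame}; one workbook open
        if f.exists():
            frames = pd.read_excel(f, sheet_name=["Credit Default Swap", "Swaption"],
                                   skiprows=[0, 1, 2, 3])
            for sheet, df in frames.items():
                df.to_csv(f.parent / f"{sheet}.csv", index=False)
            f.unlink()
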
@@ -92,14 +92,14 @@ def download_data(workdate):
     if not available_files:
         logger.error("no file available for date: %s" % str(workdate))
         return
-    reports_dir = os.path.join(os.environ['DAILY_DIR'], str(workdate), "Reports")
-    if not os.path.exists(reports_dir):
-        os.makedirs(reports_dir)
+    reports_dir = Path(os.environ['DAILY_DIR']) / str(workdate) / "Reports"
+    if not reports_dir.exists():
+        reports_dir.mkdir(parents=True)
     for filename in available_files:
-        with open(os.path.join(reports_dir, filename), "wb") as fh:
-            ftp.retrbinary('RETR ' + filename, fh.write)
-            logger.info("downloaded {0}".format(filename))
+        with (reports_dir / filename).open("wb") as fh:
+            ftp.retrbinary('RETR ' + filename, fh.write)
+        logger.info(f"downloaded {filename}")
 
     gpg = get_gpg()
 
     for filename in available_files:
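
ftplib.FTP.retrbinary invokes its callback once per received block (8 KiB by default), so the callback must append to a file handle that stays open for the whole transfer; a fresh write per block would keep only the last block. A self-contained sketch of the pattern, with a placeholder host and remote name:

    from ftplib import FTP
    from pathlib import Path

    def fetch(host: str, remote: str, dest: Path) -> None:
        with FTP(host) as ftp:  # placeholder host; login is anonymous here
            ftp.login()
            with dest.open("wb") as fh:
                # fh.write is called once per 8 KiB block until EOF
                ftp.retrbinary("RETR " + remote, fh.write, blocksize=8192)
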
@@ -109,24 +108,24 @@ def download_data(workdate):
newfilename = "Valuation_Report.csv"
else:
newfilename = "CDS_Report.xls"
- with open(os.path.join(reports_dir, filename), "rb") as fh:
- dec = gpg.decrypt_file(fh, output=os.path.join(reports_dir, newfilename),
+ with (reports_dir / filename).open("rb") as fh:
+ dec = gpg.decrypt_file(fh, output=(reports_dir / newfilename).as_posix(),
passphrase=config.key_password,
always_trust=True)
- logger.info('{0}: {1}'.format(filename, dec.status))
- os.remove(os.path.join(reports_dir, filename))
+ logger.info(f'{filename}: {dec.status}')
+ (reports_dir / filename).unlink()
# convert xls to csv
- convert_to_csv(os.path.join(reports_dir, "CDS_Report"))
+ convert_to_csv(reports_dir / "CDS_Report.xls")
insert_todb(workdate)
def insert_todb(workdate):
- reports_dir = os.path.join(os.environ['DAILY_DIR'], str(workdate), "Reports")
+ reports_dir = Path(os.environ['DAILY_DIR']) / str(workdate) / "Reports"
engine = create_engine('postgresql://dawn_user@debian/dawndb')
for report in ["Valuation", "Pnl", "CDS"]:
- fun = getattr(load_globeop_report, "read_{}_report".format(report.lower()))
- table = "{}_reports".format(report.lower())
- report_file = os.path.join(reports_dir, "{}_Report.csv".format(report))
- if not os.path.exists(report_file):
+ fun = getattr(load_globeop_report, f"read_{report.lower()}_report")
+ table = f"{report.lower()}_reports"
+ report_file = reports_dir / f"{report}_Report.csv"
+ if not report_file.exists():
continue
df = fun(report_file)
if report == "Valuation":
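
The decryption step above goes through python-gnupg's decrypt_file, which takes an open file object plus output, passphrase, and always_trust keyword arguments and reports the outcome on the returned object. A standalone sketch with hypothetical paths and passphrase:

    import gnupg  # python-gnupg

    gpg = gnupg.GPG(gnupghome="/home/user/.gnupg")  # hypothetical keyring dir
    with open("CDS_Report.xls.pgp", "rb") as fh:    # hypothetical input file
        dec = gpg.decrypt_file(fh, output="CDS_Report.xls",
                               passphrase="secret",  # placeholder
                               always_trust=True)
    print(dec.ok, dec.status)  # e.g. True 'decryption ok'
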