Diffstat (limited to 'python/task_server/globeop.py')
-rw-r--r--   python/task_server/globeop.py   21
1 files changed, 10 insertions, 11 deletions
diff --git a/python/task_server/globeop.py b/python/task_server/globeop.py
index e95823c8..4f4def68 100644
--- a/python/task_server/globeop.py
+++ b/python/task_server/globeop.py
@@ -1,8 +1,9 @@
 import os
 import os.path
+from . import DAILY_DIR
 from ftplib import FTP
 import gnupg
-from task_server import config
+from . import config
 import re
 import logging
 import sys
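
The switch to relative imports assumes the task_server package __init__ now exports DAILY_DIR alongside config. A minimal sketch of what that package-level definition could look like, assuming it wraps the same DAILY_DIR environment variable the removed code read directly (the real __init__.py is not part of this diff):

# Hypothetical excerpt of task_server/__init__.py; the env-var source matches
# what the removed lines read, but the actual definition is not shown here.
import os
from pathlib import Path

DAILY_DIR = Path(os.environ["DAILY_DIR"])
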
@@ -93,7 +94,7 @@ def download_data(workdate):
     if not available_files:
         logger.error("no file available for date: %s" % str(workdate))
         return
-    reports_dir = Path(os.environ['DAILY_DIR']) / str(workdate) / "Reports"
+    reports_dir = DAILY_DIR / str(workdate) / "Reports"
     if not reports_dir.exists():
         reports_dir.mkdir(parents=True)
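
For context, the exists()/mkdir(parents=True) guard above is the pathlib way of creating the per-date Reports directory on demand; the same effect is available in a single call via exist_ok=True. A small sketch with a hypothetical helper name:

from pathlib import Path

def ensure_reports_dir(daily_dir: Path, workdate) -> Path:
    # Hypothetical helper mirroring the guard above: create
    # <daily_dir>/<workdate>/Reports if it does not already exist.
    reports_dir = daily_dir / str(workdate) / "Reports"
    reports_dir.mkdir(parents=True, exist_ok=True)
    return reports_dir
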
@@ -121,7 +122,7 @@ def download_data(workdate):
     insert_todb(workdate)

 def insert_todb(workdate):
-    reports_dir = Path(os.environ['DAILY_DIR']) / str(workdate) / "Reports"
+    reports_dir = DAILY_DIR / str(workdate) / "Reports"
     engine = create_engine('postgresql://dawn_user@debian/dawndb')
     for report in ["Valuation", "Pnl", "CDS"]:
         fun = getattr(load_globeop_report, f"read_{report.lower()}_report")
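
The loop above resolves each loader by name, so "Valuation", "Pnl" and "CDS" map to read_valuation_report, read_pnl_report and read_cds_report on load_globeop_report. A self-contained sketch of that getattr dispatch, using a stub namespace in place of the real module:

# Sketch of the name-based dispatch in insert_todb; the SimpleNamespace is a
# hypothetical stand-in for the load_globeop_report module.
from types import SimpleNamespace

load_globeop_report = SimpleNamespace(
    read_valuation_report=lambda path: f"valuation rows from {path}",
    read_pnl_report=lambda path: f"pnl rows from {path}",
    read_cds_report=lambda path: f"cds rows from {path}",
)

for report in ["Valuation", "Pnl", "CDS"]:
    fun = getattr(load_globeop_report, f"read_{report.lower()}_report")
    print(fun(f"{report}.csv"))
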
@@ -145,24 +146,22 @@ def upload_bond_marks(engine, workdate):
                            params=(workdate.date(),))
     df.rename(columns={'identifier': 'IDENTIFIER',
                        'price': 'Price'}, inplace=True)
-    filename = 'securitiesNpv{0:%Y%m%d_%H%M%S}.csv'.format(workdate)
-    fullpath = os.path.join(os.environ['DAILY_DIR'], str(workdate.date()), filename)
+    fullpath = DAILY_DIR / str(workdate.date()) / f"securitiesNpv{workdate:%Y%m%d_%H%M%S}.csv"
     df.to_csv(fullpath, index=False)
     ftp = get_ftp('incoming')
-    with open(fullpath, "rb") as fh:
-        ftp.storbinary('STOR ' + filename, fh)
+    with fullpath.open("rb") as fh:
+        ftp.storbinary('STOR ' + fullpath.name, fh)
     logger.info("upload bond marks done")

 def upload_cds_marks(engine, workdate):
     df = pd.read_sql_query("""SELECT cds.dealid AS "DealID", 'CREDIT_SWAP' AS "Instrument Type",
     (a.clean_nav+a.accrued) AS "NPV" from list_abscds_marks(%s) a
     JOIN cds USING (security_id)""", engine, params = (workdate.date(),))
-    filename = 'otcNpv{0:%Y%m%d}.csv'.format(workdate)
-    fullpath = os.path.join(os.environ['DAILY_DIR'], str(workdate.date()), filename)
+    fullpath = DAILY_DIR / str(workdate.date()) / f"otcNpv{workdate:%Y%m%d}.csv"
     df.to_csv(fullpath, index=False)
     ftp = get_ftp('incoming')
-    with open(fullpath, "rb") as fh:
-        ftp.storbinary('STOR ' + filename, fh)
+    with fullpath.open("rb") as fh:
+        ftp.storbinary('STOR ' + fullpath.name, fh)
     logger.info("upload cds marks done")

 def upload_data(engine, workdate):
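
Both upload functions now share one pattern: build the CSV path under DAILY_DIR, write it with to_csv, then stream it over FTP using the path's name attribute as the remote filename. A standalone sketch of that upload step, with a placeholder host and anonymous login standing in for whatever get_ftp('incoming') actually does:

from ftplib import FTP
from pathlib import Path

def upload_csv(fullpath: Path, host: str = "ftp.example.com") -> None:
    # Same shape as the upload_* functions above: stream the local CSV in
    # binary mode and use the path's basename as the remote filename.
    ftp = FTP(host)   # placeholder host; the real code connects via get_ftp('incoming')
    ftp.login()       # anonymous login purely for this sketch; credentials are an assumption
    try:
        with fullpath.open("rb") as fh:
            ftp.storbinary("STOR " + fullpath.name, fh)
    finally:
        ftp.quit()
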