about · summary · refs · log · tree · commit · diff · stats
path: root/python/task_server/globeop.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/task_server/globeop.py')
-rw-r--r--  python/task_server/globeop.py  47
1 file changed, 24 insertions, 23 deletions
diff --git a/python/task_server/globeop.py b/python/task_server/globeop.py
index 397b1f47..91e5f5a0 100644
--- a/python/task_server/globeop.py
+++ b/python/task_server/globeop.py
@@ -6,8 +6,11 @@ from task_server import config
import re
import logging
import shutil
+import sys
import pandas as pd
from sqlalchemy import create_engine
+sys.path.append('..')
+import load_globeop_report
logger = logging.getLogger(__name__)
@@ -59,6 +62,12 @@ def get_gpg():
gpg.encoding = 'utf8'
return gpg
+def convert_to_csv(f):
+ if os.path.exists(f + ".xls"):
+ df = pd.read_excel(f + ".xls", sheetname=0, skiprows=[0,1,2,3])
+ df.to_csv(f + ".csv", index=False)
+ os.remove(f + ".xls")
+
def download_data(workdate):
ftp = get_ftp('outgoing')
files = ftp.nlst()
@@ -67,7 +76,7 @@ def download_data(workdate):
valuationfiles = [filename for filename in files if "csv" in filename and \
"Valuation_TradeID" in filename if get_ped(filename) < workdate]
cdsfiles = [filename for filename in files if "TradeSearch" in filename \
- if run_date(filename).date()<=workdate]
+ if run_date(filename).date() <= workdate]
available_files = []
if pnlfiles:
available_files.append(sorted(pnlfiles, key=key_fun, reverse=True)[0])
@@ -92,7 +101,7 @@ def download_data(workdate):
gpg = get_gpg()
for filename in available_files:
if "Profit" in filename:
- newfilename = "Pnl.csv"
+ newfilename = "Pnl_Report.csv"
elif "Valuation" in filename:
newfilename = "Valuation_Report.csv"
else:
@@ -103,30 +112,22 @@ def download_data(workdate):
always_trust=True)
logger.info('{0}: {1}'.format(filename, dec.status))
os.remove(os.path.join(reports_dir, filename))
- if os.path.exists(os.path.join(reports_dir, "CDS_Report.xls")):
- df = pd.read_excel(os.path.join(reports_dir, "CDS_Report.xls"), sheetname=0, skiprows=[0,1,2,3])
- df.to_csv(os.path.join(reports_dir, "CDS_Report.csv"), index=False)
- os.remove(os.path.join(reports_dir, "CDS_Report.xls"))
+ ## convert xls to csv
+ convert_to_csv(os.path.join(reports_dir, "CDS_Report"))
engine = create_engine('postgresql://dawn_user@debian/dawndb')
- for f, table in zip(["Valuation_Report.csv", "Pnl.csv"],
- ["val_reports", "pnl_reports"]):
- df = pd.read_csv(os.path.join(reports_dir, f))
- if 'PeriodEndDate' in df:
- period_end_date = pd.Timestamp(df.PeriodEndDate[0])
- df['row'] = df.index
- if 'AccountingPeriod' in df:
- del df['AccountingPeriod']
- df.Strat = df.Strat.str.replace("^(SERCGMAST__){1,2}(M_|SER_)?", "", 1)
- df.Port = df.Port.str.replace("^(SERCGMAST__){1,2}(SERG__|SERG_)?", "", 1)
- df.columns = df.columns.str.lower()
- if f == "Pnl.csv":
- df['longshortindicator'] = df['longshortindicator'].str.strip()
- df.columns = df.columns.str.replace(" ", "")
- df['date'] = period_end_date
- sql_str = "DELETE FROM pnl_reports WHERE date=%s"
+ for report in ["Valuation", "Pnl", "CDS"]:
+ fun = getattr(load_globeop_report, "read_{}_report".format(report.lower()))
+ table = "{}_reports".format(report.lower())
+
+ df = fun(os.path.join(reports_dir, "{}_Report.csv".format(report)))
+ if report == "Valuation":
+ period_end_date = pd.Timestamp(df.periodenddate[0])
+ sql_str = "DELETE FROM valuation_reports WHERE periodenddate=%s"
else:
- sql_str = "DELETE FROM val_reports WHERE periodenddate=%s"
+ df['date'] = period_end_date
+ sql_str = "DELETE FROM {} WHERE date=%s".format(table)
+ df['row'] = df.index
engine.execute(sql_str, (period_end_date,))
df.to_sql(table, engine, if_exists='append', index=False)