Diffstat (limited to 'python/task_server/globeop.py')
| -rw-r--r-- | python/task_server/globeop.py | 30 |
1 file changed, 16 insertions, 14 deletions
diff --git a/python/task_server/globeop.py b/python/task_server/globeop.py
index 11421c69..58d46c6d 100644
--- a/python/task_server/globeop.py
+++ b/python/task_server/globeop.py
@@ -5,9 +5,7 @@ import gnupg
from task_server import config
import re
import logging
-import shutil
import sys
-import pandas as pd
from sqlalchemy import create_engine
sys.path.append('..')
import load_globeop_report
@@ -40,6 +38,7 @@ def key_fun(s):
    KD = pd.datetime.strptime(regex.group(1), "%Y%m%d.%H%M%S")
return (PED, KD)
+
def run_date(s):
if 'SWO' in s:
date_string = s.split("_", 5)[4]
@@ -47,14 +46,16 @@ def run_date(s):
        date_string = s.split("_", 3)[2]
return pd.datetime.strptime(date_string, "%Y%m%d.%H%M%S")
+
def get_ftp(folder):
ftp = FTP('ftp.globeop.com')
ftp.login('srntsftp', config.ftp_password)
ftp.cwd(folder)
return ftp
+
def get_gpg():
- if os.name=='nt':
+ if os.name == 'nt':
gpg = gnupg.GPG(gpgbinary=r'"c:\\Program Files (x86)\\GNU\\GnuPG\\gpg2.exe"',
gnupghome=os.path.join(os.getenv('APPDATA'), "gnupg"))
elif os.name == 'posix':
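As context for the hunks above: key_fun and run_date both recover a report timestamp by splitting the GlobeOp filename on underscores and parsing a %Y%m%d.%H%M%S field (pd.datetime is only an alias for the standard library datetime and was removed in pandas 1.0). A minimal standalone sketch of the same idea, using the standard library directly and an invented filename:

    from datetime import datetime

    def parse_run_date(filename):
        # SWO reports carry the timestamp in the fifth underscore-separated
        # field, everything else in the third, mirroring run_date() above.
        if 'SWO' in filename:
            field = filename.split("_", 5)[4]
        else:
            field = filename.split("_", 3)[2]
        return datetime.strptime(field, "%Y%m%d.%H%M%S")

    # Invented filename purely for illustration.
    print(parse_run_date("Fund_Valuation_20170720.063015_TradeID.csv"))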
@@ -64,18 +65,18 @@ def get_gpg():
def convert_to_csv(f):
if os.path.exists(f + ".xls"):
- df = pd.read_excel(f + ".xls", sheet_name=0, skiprows=[0,1,2,3])
+ df = pd.read_excel(f + ".xls", sheet_name=0, skiprows=[0, 1, 2, 3])
df.to_csv(f + ".csv", index=False)
os.remove(f + ".xls")
def download_data(workdate):
ftp = get_ftp('outgoing')
files = ftp.nlst()
- pnlfiles = [filename for filename in files if "csv" in filename and \
+ pnlfiles = [filename for filename in files if "csv" in filename and
"Profit" in filename if get_ped(filename) < workdate]
- valuationfiles = [filename for filename in files if "csv" in filename and \
+ valuationfiles = [filename for filename in files if "csv" in filename and
"Valuation_TradeID" in filename if get_ped(filename) < workdate]
- cdsfiles = [filename for filename in files if "TradeSearch" in filename \
+ cdsfiles = [filename for filename in files if "TradeSearch" in filename
if run_date(filename).date() <= workdate]
available_files = []
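As context for the hunk above: download_data lists the FTP directory with ftp.nlst() and keeps only the P&L, valuation and CDS reports whose embedded timestamp falls on or before the working date. A self-contained sketch of that filtering step on an invented listing, with no network access and a simplified stand-in for get_ped()/run_date():

    from datetime import date, datetime

    # Invented listing purely for illustration.
    listing = [
        "Fund_Valuation_TradeID_20170719.063000_1.csv",
        "Fund_Profit_and_Loss_Report.pdf",
        "TradeSearch_20170721.063000.xls",
    ]
    workdate = date(2017, 7, 20)

    def embedded_date(name):
        # Return the first %Y%m%d.%H%M%S token found in the name, if any.
        for field in name.replace(".csv", "").replace(".xls", "").split("_"):
            try:
                return datetime.strptime(field, "%Y%m%d.%H%M%S").date()
            except ValueError:
                continue
        return None

    valuationfiles = [name for name in listing
                      if "csv" in name and "Valuation_TradeID" in name
                      if embedded_date(name) is not None
                      and embedded_date(name) <= workdate]
    print(valuationfiles)  # ['Fund_Valuation_TradeID_20170719.063000_1.csv']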
@@ -89,7 +90,7 @@ def download_data(workdate):
    if not available_files:
logger.error("no file available for date: %s" % str(workdate))
return
-
+ import pdb; pdb.set_trace()
reports_dir = os.path.join(os.environ['DAILY_DIR'], str(workdate), "Reports")
if not os.path.exists(reports_dir):
os.makedirs(reports_dir)
@@ -108,12 +109,12 @@ def download_data(workdate):
        else:
newfilename = "CDS_Report.xls"
with open(os.path.join(reports_dir, filename), "rb") as fh:
- dec = gpg.decrypt_file(fh, output = os.path.join(reports_dir, newfilename),
+ dec = gpg.decrypt_file(fh, output=os.path.join(reports_dir, newfilename),
passphrase=config.key_password,
always_trust=True)
logger.info('{0}: {1}'.format(filename, dec.status))
os.remove(os.path.join(reports_dir, filename))
- ## convert xls to csv
+ # convert xls to csv
convert_to_csv(os.path.join(reports_dir, "CDS_Report"))
insert_todb(workdate)
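As context for the decryption step above: each downloaded file is handed to python-gnupg, which writes the plaintext to the output path and reports the outcome on the returned object. A minimal sketch of the same call, assuming a hypothetical report.csv.gpg, the default keyring location, and a passphrase read from an environment variable rather than from config:

    import os
    import gnupg

    # Assumes gpg is installed and the key lives in the default keyring.
    gpg = gnupg.GPG(gnupghome=os.path.expanduser("~/.gnupg"))

    with open("report.csv.gpg", "rb") as fh:  # hypothetical input file
        dec = gpg.decrypt_file(fh,
                               output="report.csv",
                               passphrase=os.environ.get("KEY_PASSWORD"),
                               always_trust=True)

    # dec.ok / dec.status report whether decryption succeeded.
    if not dec.ok:
        raise RuntimeError("decryption failed: %s" % dec.status)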
@@ -138,9 +139,10 @@ def insert_todb(workdate):
        df.to_sql(table, engine, if_exists='append', index=False)
def upload_bond_marks(engine, workdate):
- df = pd.read_sql_query("SELECT * from list_marks(%s)", engine, params = (workdate.date(),))
- df.rename(columns = {'identifier': 'IDENTIFIER',
- 'price': 'Price'}, inplace=True)
+ df = pd.read_sql_query("SELECT * from list_marks(%s)", engine,
+ params=(workdate.date(),))
+ df.rename(columns={'identifier': 'IDENTIFIER',
+ 'price': 'Price'}, inplace=True)
filename = 'securitiesNpv{0:%Y%m%d_%H%M%S}.csv'.format(workdate)
fullpath = os.path.join(os.environ['DAILY_DIR'], str(workdate.date()), filename)
df.to_csv(fullpath, index=False)
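As context for upload_bond_marks above: the marks are pulled through a parameterized query (the %s placeholder matches the psycopg2 paramstyle of the production PostgreSQL engine), the columns are renamed to the upload schema, and the frame is written out as a securitiesNpv CSV. The sketch below keeps the same shape but runs against an in-memory SQLite connection with invented data, whose paramstyle is '?', so it needs no database server:

    import sqlite3
    from datetime import date

    import pandas as pd

    conn = sqlite3.connect(":memory:")  # stand-in for the real engine
    pd.DataFrame({"identifier": ["XS0000000000"], "price": [101.25],
                  "workdate": ["2017-07-20"]}).to_sql("marks", conn, index=False)

    workdate = date(2017, 7, 20)
    df = pd.read_sql_query("SELECT identifier, price FROM marks WHERE workdate = ?",
                           conn, params=(str(workdate),))
    df.rename(columns={"identifier": "IDENTIFIER", "price": "Price"}, inplace=True)
    df.to_csv("securitiesNpv{0:%Y%m%d_%H%M%S}.csv".format(workdate), index=False)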
@@ -165,7 +167,7 @@ def upload_data(engine, workdate):
    upload_bond_marks(engine, workdate)
upload_cds_marks(engine, workdate)
-def back_fill(start_date=pd.datetime(2017,7,20)):
+def back_fill(start_date=pd.datetime(2017, 7, 20)):
date_rng = pd.date_range(start=start_date, end=pd.Timestamp.today(), freq='B')
for date in date_rng:
insert_todb(date.date())
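As context for back_fill above: pd.date_range with freq='B' yields business days only (Monday to Friday), and the insert is re-run for each of them. A minimal sketch of that loop; pd.datetime was removed in pandas 1.0, so the start date is given as a pd.Timestamp here:

    import pandas as pd

    def back_fill_dates(start=pd.Timestamp("2017-07-20")):
        # freq="B" skips Saturdays and Sundays.
        for d in pd.date_range(start=start, end=pd.Timestamp.today(), freq="B"):
            yield d.date()

    for day in back_fill_dates():
        print(day)  # stand-in for insert_todb(day)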
