"""Exchange daily reports and marks with the GlobeOp FTP site.

Downloads PGP-encrypted PnL / valuation / CDS reports into a per-date
``<root>/<date>/Reports`` directory, and uploads bond / CDS NPV marks
produced from the database.
"""
import logging
import os
import os.path
import re
import shutil  # noqa: F401  kept: never delete existing module imports
from datetime import datetime
from ftplib import FTP

import pandas as pd
from pandas.tseries.offsets import BDay

# Optional third-party / project dependencies are import-guarded (the
# module already used this pattern for pandas) so that the pure
# filename-parsing helpers remain importable without FTP/GPG credentials.
try:
    import gnupg
except ImportError:
    gnupg = None
try:
    from sqlalchemy import create_engine  # noqa: F401  kept for importers
except ImportError:
    pass
try:
    from task_server import config
except ImportError:
    config = None

logger = logging.getLogger(__name__)

# Root of the per-date working directories (network share on Windows).
if os.name == 'nt':
    root = "//diskstation/Daily"
elif os.name == 'posix':
    root = '/home/serenitas/Daily'


def get_ped(s):
    """Return the period-end date (PED) encoded in filename *s*.

    Filenames either carry an explicit ``PED=YYYY-MM-DD`` token, or start
    with a ``YYYYMMDD`` run stamp, in which case the PED is taken to be
    the previous business day.
    """
    regex = re.search("PED=([^.]+)", s)
    if regex:
        # BUGFIX: pd.datetime (removed in pandas 2.0) -> stdlib datetime.
        return datetime.strptime(regex.group(1), "%Y-%m-%d").date()
    regex = re.search("([^.]+)", s)
    # BUGFIX: the format string was passed positionally into
    # pd.to_datetime's ``errors`` parameter; it must be ``format=``.
    ped = pd.to_datetime(regex.group(1), format="%Y%m%d") - BDay(1)
    return ped.date()


def key_fun(s):
    """Sort key for report filenames: (period-end date, knowledge datetime)."""
    ped = get_ped(s)
    regex = re.search("KD=([^.]+)", s)
    if regex:
        kd = datetime.strptime(regex.group(1), "%Y-%m-%d-%H-%M-%S")
    else:
        regex = re.search(r"([^.]+\.[^.]+)", s)
        kd = datetime.strptime(regex.group(1), "%Y%m%d.%H%M%S")
    return (ped, kd)


def run_date(s):
    """Parse the ``YYYYMMDD.HHMMSS`` run stamp embedded in filename *s*.

    SWO reports carry the stamp in the 5th underscore-separated field,
    other reports in the 3rd.
    """
    if 'SWO' in s:
        date_string = s.split("_", 5)[4]
    else:
        date_string = s.split("_", 3)[2]
    return datetime.strptime(date_string, "%Y%m%d.%H%M%S")


def get_ftp(folder):
    """Return an authenticated GlobeOp FTP connection, cwd'd to *folder*.

    Caller is responsible for closing the connection (``ftp.quit()``).
    """
    ftp = FTP('ftp.globeop.com')
    ftp.login('srntsftp', config.ftp_password)
    ftp.cwd(folder)
    return ftp


def get_gpg():
    """Return a GPG handle used to decrypt the downloaded reports."""
    if os.name == 'nt':
        # BUGFIX: the original path was a raw string containing doubled
        # backslashes AND embedded quote characters — neither belongs in
        # the on-disk path handed to gnupg.
        gpg = gnupg.GPG(
            gpgbinary=r'c:\Program Files (x86)\GNU\GnuPG\gpg2.exe',
            gnupghome=os.path.join(os.getenv('APPDATA'), "gnupg"))
    elif os.name == 'posix':
        # NOTE(review): ``homedir`` is the keyword of the
        # pretty-bad-protocol gnupg fork, while the nt branch uses
        # python-gnupg's ``gnupghome`` — confirm which package each host
        # actually has installed.
        gpg = gnupg.GPG(homedir='/home/guillaume/.gnupg')
    gpg.encoding = 'utf8'
    return gpg


def download_data(workdate):
    """Fetch the latest PnL, valuation and CDS reports for *workdate*.

    For each report family, picks the most recent file whose period-end
    (or run) date precedes *workdate*, downloads it into
    ``<root>/<workdate>/Reports``, decrypts it with GPG and converts the
    CDS spreadsheet to CSV.  Logs an error and returns if nothing is
    available.
    """
    ftp = get_ftp('outgoing')
    try:
        files = ftp.nlst()
        pnlfiles = [f for f in files
                    if "csv" in f and "Profit" in f
                    and get_ped(f) < workdate]
        valuationfiles = [f for f in files
                          if "csv" in f and "Valuation_TradeID" in f
                          and get_ped(f) < workdate]
        cdsfiles = [f for f in files
                    if "TradeSearch" in f
                    and run_date(f).date() <= workdate]
        available_files = []
        # max(..., key=...) replaces sorted(..., reverse=True)[0]: same
        # first-maximal element, without the O(n log n) sort.
        if pnlfiles:
            available_files.append(max(pnlfiles, key=key_fun))
        if valuationfiles:
            available_files.append(max(valuationfiles, key=key_fun))
        if cdsfiles:
            available_files.append(max(cdsfiles, key=run_date))
        if not available_files:
            logger.error("no file available for date: %s" % str(workdate))
            return
        reports_dir = os.path.join(root, str(workdate), "Reports")
        if not os.path.exists(reports_dir):
            os.makedirs(reports_dir)
        for filename in available_files:
            with open(os.path.join(reports_dir, filename), "wb") as fh:
                ftp.retrbinary('RETR ' + filename, fh.write)
            logger.info("downloaded {0}".format(filename))
    finally:
        # BUGFIX: the connection was never closed.
        ftp.quit()

    gpg = get_gpg()
    for filename in available_files:
        if "Profit" in filename:
            newfilename = "Pnl.csv"
        elif "Valuation" in filename:
            newfilename = "Valuation_Report.csv"
        else:
            newfilename = "CDS_Report.xls"
        with open(os.path.join(reports_dir, filename), "rb") as fh:
            dec = gpg.decrypt_file(
                fh,
                output=os.path.join(reports_dir, newfilename),
                passphrase=config.key_password,
                always_trust=True)
        logger.info('{0}: {1}'.format(filename, dec.status))
        # Drop the encrypted original once the clear file is written.
        os.remove(os.path.join(reports_dir, filename))

    cds_xls = os.path.join(reports_dir, "CDS_Report.xls")
    if os.path.exists(cds_xls):
        # BUGFIX: ``sheetname`` was removed from pandas; the argument has
        # been ``sheet_name`` since pandas 0.21.
        df = pd.read_excel(cds_xls, sheet_name=0, skiprows=[0, 1, 2, 3])
        df.to_csv(os.path.join(reports_dir, "CDS_Report.csv"), index=False)
        os.remove(cds_xls)


def upload_bond_marks(engine, workdate):
    """Export bond marks for *workdate* (a datetime) and upload to GlobeOp."""
    df = pd.read_sql_query("SELECT * from list_marks(%s)", engine,
                           params=(workdate.date(),))
    df.rename(columns={'identifier': 'IDENTIFIER', 'price': 'Price'},
              inplace=True)
    filename = 'securitiesNpv{0:%Y%m%d_%H%M%S}.csv'.format(workdate)
    out_dir = os.path.join(root, str(workdate.date()))
    # Robustness: the per-date directory may not exist yet.
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    fullpath = os.path.join(out_dir, filename)
    df.to_csv(fullpath, index=False)
    ftp = get_ftp('incoming')
    try:
        with open(fullpath, "rb") as fh:
            ftp.storbinary('STOR ' + filename, fh)
    finally:
        ftp.quit()
    logger.info("upload bond marks done")


def upload_cds_marks(engine, workdate):
    """Export ABS CDS marks for *workdate* (a datetime) and upload to GlobeOp."""
    df = pd.read_sql_query(
        """SELECT cds.dealid AS "DealID", 'CREDIT_SWAP' AS "Instrument Type", (a.clean_nav+a.accrued) AS "NPV" from list_abscds_marks(%s) a JOIN cds USING (security_id)""",
        engine, params=(workdate.date(),))
    filename = 'otcNpv{0:%Y%m%d}.csv'.format(workdate)
    out_dir = os.path.join(root, str(workdate.date()))
    # Robustness: the per-date directory may not exist yet.
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    fullpath = os.path.join(out_dir, filename)
    df.to_csv(fullpath, index=False)
    ftp = get_ftp('incoming')
    try:
        with open(fullpath, "rb") as fh:
            ftp.storbinary('STOR ' + filename, fh)
    finally:
        ftp.quit()
    logger.info("upload cds marks done")


def upload_data(engine, workdate):
    """Upload both bond and CDS marks for *workdate*."""
    upload_bond_marks(engine, workdate)
    upload_cds_marks(engine, workdate)