path: root/python/quote_parsing/__main__.py
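"""Entry point for the quote-parsing pipeline.

Downloads swaption quote emails, parses each message into an option stack
and forward index levels, and writes the combined results to the database.
"""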
import datetime
import logging
import os
import pandas as pd
import pickle
import sys

from googleapiclient import errors  # HttpError raised by the Google API client (assumption)
from . import SerenitasFileHandler
from . import logger
from .download_emails import save_emails
from .parse_emails import parse_email, write_todb

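# Route this module's log output to a dedicated file.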
fh = SerenitasFileHandler("emails_parsing.log")
logger.addHandler(fh)
logger.setLevel(logging.WARNING)

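# Fetch new quote emails; on an API or cache error, retry with a full,
# non-incremental download (update=False).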
try:
    save_emails()
except (errors.HttpError, FileNotFoundError) as e:
    logger.error(e)
    save_emails(update=False)

data_dir = os.path.join(os.environ["DATA_DIR"], "swaptions")  # KeyError if DATA_DIR is unset
emails = [f for f in os.scandir(data_dir) if f.is_file()]
swaption_stack = {}
index_frames = []  # per-email forward index rows, concatenated after the loop

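# Message ids already written to the database, cached across runs so
# previously processed emails are skipped.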
try:
    with open(".pickle", "rb") as pf:
        already_uploaded = pickle.load(pf)
except FileNotFoundError:
    already_uploaded = {}

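# Each saved email is named '<compose date>_<message id>'; parse every
# message that has not been uploaded yet.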
for f in emails:
    date_composed, msg_id = f.name.split("_", 1)  # maxsplit=1 in case the id itself contains "_"
    date_composed = datetime.datetime.strptime(date_composed,
                                               "%Y-%m-%d %H-%M-%S")
    if msg_id in already_uploaded:
        continue
    try:
        key, (option_stack, fwd_index) = parse_email(f, date_composed)
    except RuntimeError as e:
        logger.error(e)
    else:
        if key[0] is None or len(option_stack) == 0:
            logger.error(f"Something wrong with email: {f.name}")
            continue
        swaption_stack[key] = pd.concat(option_stack,
                                        names=['expiry', 'strike'])
        index_frames.append(fwd_index)
        already_uploaded[msg_id] = key[0]
if not index_frames:
    sys.exit()
index_data = pd.concat(index_frames)
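# Normalize column dtypes before the database upload.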
for col in ['fwdbpv', 'fwdprice', 'fwdspread', 'ref']:
    if col in index_data:
        index_data[col] = index_data[col].astype('float')
index_data['index'] = index_data['index'].astype('category')

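# Stack all parsed emails into one frame; keep the first quote seen for
# each (quotedate, index, series, expiry, strike) combination.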
swaption_stack = pd.concat(swaption_stack,
                           names=['quotedate', 'index', 'series'])
swaption_stack = swaption_stack.reset_index()
swaption_stack = swaption_stack.drop_duplicates(['quotedate', 'index', 'series',
                                                 'expiry', 'strike'])
swaption_stack = swaption_stack.set_index(['quotedate', 'index', 'series', 'expiry'])
index_data = index_data.reset_index()
index_data = index_data.drop_duplicates(['quotedate', 'index', 'series', 'expiry'])
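# Imported here, presumably so the connection pool is only created once
# there is data to upload.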
from utils.db import serenitas_pool

conn = serenitas_pool.getconn()
try:
    write_todb(swaption_stack, index_data, conn)
finally:
    serenitas_pool.putconn(conn)  # always return the connection, even on failure

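# Persist the updated upload cache for the next run.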
with open(".pickle", "wb") as fh:
   pickle.dump(already_uploaded, fh)