aboutsummaryrefslogtreecommitdiffstats
path: root/python/quote_parsing/__main__.py
diff options
context:
space:
mode:
Diffstat (limited to 'python/quote_parsing/__main__.py')
-rw-r--r--python/quote_parsing/__main__.py66
1 file changed, 66 insertions, 0 deletions
diff --git a/python/quote_parsing/__main__.py b/python/quote_parsing/__main__.py
new file mode 100644
index 00000000..ae75cd92
--- /dev/null
+++ b/python/quote_parsing/__main__.py
@@ -0,0 +1,66 @@
import datetime
import logging
import os
import pickle
import sys

import pandas as pd

from ..utils import SerenitasFileHandler
from . import logger
from .download_emails import save_emails

# NOTE(review): this module also references `errors` (presumably
# googleapiclient.errors), `parse_email`, and `write_todb` without importing
# them — confirm where they come from and add the project imports.
# Route this module's log output to a dedicated file and record only
# warnings and above.
fh = SerenitasFileHandler("emails_parsing.log")
# BUG FIX: logging.Logger has no setHandler() method — the original line
# raised AttributeError before any work was done. addHandler() is the API.
logger.addHandler(fh)
logger.setLevel(logging.WARNING)

try:
    save_emails()
except (errors.HttpError, FileNotFoundError) as e:
    # The incremental download failed (API error or missing local state):
    # log it and retry with a full, non-incremental fetch.
    # NOTE(review): `errors` is not imported in this file — presumably
    # googleapiclient.errors; confirm it is in scope.
    logger.error(e)
    save_emails(update=False)
+
# Collect the downloaded swaption e-mail files and restore the map of
# message ids that were already processed on a previous run.
# assumes DATA_DIR is set in the environment — os.getenv returns None
# otherwise and os.path.join would raise; TODO confirm with deployment.
data_dir = os.path.join(os.getenv("DATA_DIR"), "swaptions")
emails = [entry for entry in os.scandir(data_dir) if entry.is_file()]

swaption_stack = {}
index_data = pd.DataFrame()

try:
    with open(".pickle", "rb") as state_file:
        already_uploaded = pickle.load(state_file)
except FileNotFoundError:
    # First run: nothing has been uploaded yet.
    already_uploaded = {}
+
# Parse each e-mail that has not been processed yet, accumulating the
# per-message swaption quote stacks and forward index levels.
for f in emails:
    # File names look like "<YYYY-mm-dd HH-MM-SS>_<msg_id>" — a second
    # underscore in the name would make this unpack raise ValueError.
    date_composed, msg_id = f.name.split("_")
    date_composed = datetime.datetime.strptime(date_composed,
                                               "%Y-%m-%d %H-%M-%S")
    if msg_id in already_uploaded:
        continue
    try:
        key, (option_stack, fwd_index) = parse_email(f, date_composed)
    except RuntimeError as e:
        # Unparseable e-mail: log and move on to the next one.
        logger.error(e)
    else:
        if key[0] is None or len(option_stack) == 0:
            logger.error(f"Something wrong with email: {f.name}")
            continue
        swaption_stack[key] = pd.concat(option_stack,
                                        names=['expiry', 'strike'])
        # FIX: DataFrame.append was deprecated in pandas 1.4 and removed
        # in 2.0; pd.concat is the supported equivalent.
        index_data = pd.concat([index_data, fwd_index])
        already_uploaded[msg_id] = key[0]
# Nothing new was parsed this run — stop before touching the database.
if index_data.empty:
    sys.exit()

# Normalize dtypes: the numeric quote fields become floats and the index
# name becomes a categorical.
numeric_cols = ['fwdbpv', 'fwdprice', 'fwdspread', 'ref']
for col in numeric_cols:
    if col in index_data:
        index_data[col] = index_data[col].astype('float')
index_data['index'] = index_data['index'].astype('category')
+
# Collapse the per-e-mail stacks into one frame indexed by
# (quotedate, index, series, expiry), dropping duplicate quotes
# (drop_duplicates keeps the first occurrence).
swaption_stack = pd.concat(swaption_stack,
                           names=['quotedate', 'index', 'series'])
swaption_stack = swaption_stack.reset_index()
swaption_stack = swaption_stack.drop_duplicates(['quotedate', 'index', 'series',
                                                 'expiry', 'strike'])
swaption_stack = swaption_stack.set_index(['quotedate', 'index', 'series', 'expiry'])
index_data = index_data.reset_index()
index_data = index_data.drop_duplicates(['quotedate', 'index', 'series', 'expiry'])

from ..utils.db import serenitas_pool

conn = serenitas_pool.getconn()
try:
    write_todb(swaption_stack, index_data, conn)
finally:
    # FIX: always return the connection to the pool — the original leaked
    # it (and never ran the pickle dump either) when write_todb raised.
    serenitas_pool.putconn(conn)
+
# Persist the processed-message map so the next run skips these e-mails.
with open(".pickle", "wb") as state_file:
    pickle.dump(already_uploaded, state_file)