Diffstat (limited to 'python/quote_parsing/__main__.py')
 python/quote_parsing/__main__.py | 27 +++++++++++++--------------
 1 file changed, 13 insertions(+), 14 deletions(-)
diff --git a/python/quote_parsing/__main__.py b/python/quote_parsing/__main__.py
index acc56e5c..c97b21ef 100644
--- a/python/quote_parsing/__main__.py
+++ b/python/quote_parsing/__main__.py
@@ -34,8 +34,7 @@ except FileNotFoundError:
 
 for f in emails:
     date_composed, msg_id = f.name.split("_")
-    date_composed = datetime.datetime.strptime(date_composed,
-                                               "%Y-%m-%d %H-%M-%S")
+    date_composed = datetime.datetime.strptime(date_composed, "%Y-%m-%d %H-%M-%S")
     if msg_id in already_uploaded:
         continue
     else:
@@ -47,29 +46,29 @@ for f in emails:
         if key[0] is None or len(option_stack) == 0:
             logger.error(f"Something wrong with email: {f.name}")
             continue
-        swaption_stack[key] = pd.concat(option_stack,
-                                        names=['expiry', 'strike'])
+        swaption_stack[key] = pd.concat(option_stack, names=["expiry", "strike"])
         index_data = index_data.append(fwd_index)
         already_uploaded[msg_id] = key[0]
 
 if index_data.empty:
     sys.exit()
-for col in ['fwdbpv', 'fwdprice', 'fwdspread', 'ref']:
+for col in ["fwdbpv", "fwdprice", "fwdspread", "ref"]:
     if col in index_data:
-        index_data[col] = index_data[col].astype('float')
-index_data['index'] = index_data['index'].astype('category')
+        index_data[col] = index_data[col].astype("float")
+index_data["index"] = index_data["index"].astype("category")
-swaption_stack = pd.concat(swaption_stack,
-                           names=['quotedate', 'index', 'series'])
+swaption_stack = pd.concat(swaption_stack, names=["quotedate", "index", "series"])
 swaption_stack = swaption_stack.reset_index()
-swaption_stack = swaption_stack.drop_duplicates(['quotedate', 'index', 'series',
-                                                 'expiry', 'strike'])
-swaption_stack = swaption_stack.set_index(['quotedate', 'index', 'series', 'expiry'])
+swaption_stack = swaption_stack.drop_duplicates(
+    ["quotedate", "index", "series", "expiry", "strike"]
+)
+swaption_stack = swaption_stack.set_index(["quotedate", "index", "series", "expiry"])
 index_data = index_data.reset_index()
-index_data = index_data.drop_duplicates(['quotedate', 'index', 'series', 'expiry'])
+index_data = index_data.drop_duplicates(["quotedate", "index", "series", "expiry"])
 
 from utils.db import serenitas_pool
+
 conn = serenitas_pool.getconn()
 write_todb(swaption_stack, index_data, conn)
 serenitas_pool.putconn(conn)
 with open(".pickle", "wb") as fh:
-    pickle.dump(already_uploaded, fh)
+    pickle.dump(already_uploaded, fh)
