import logging
import argparse
import concurrent.futures
import datetime
from json.decoder import JSONDecodeError

from .api import MarkitAPI
from .quotes import MarkitQuoteKind

logger = logging.getLogger(__name__)


def _epoch_millis(day):
    """Return epoch milliseconds for local midnight at the start of *day*.

    Portable replacement for ``strftime("%s")``, which is a glibc extension
    (undocumented in Python, unavailable on Windows). A naive datetime's
    ``timestamp()`` is interpreted in local time, matching ``%s`` semantics.
    """
    midnight = datetime.datetime.combine(day, datetime.time.min)
    return int(midnight.timestamp()) * 1000


def process_asset_class(asset_class, after):
    """Poll Markit for *asset_class* quotes newer than *after* and persist them.

    Repeatedly pages through ``MarkitAPI.get_data``, staging and committing
    each parsed quote (skipping batches already uploaded), and advances the
    ``after`` cursor until the API returns no data.

    Args:
        asset_class: Markit asset-class code (e.g. "ABS", "CD", "TRS").
        after: pagination cursor — epoch milliseconds on the first call,
            thereafter "<receiveddatetime>,<asset_class>-9480-<quoteid>".
    """
    already_uploaded = MarkitQuoteKind[asset_class].already_uploaded()
    while True:
        try:
            if data := MarkitAPI.get_data(asset_class, after):
                for key, quotes in data:
                    quotes = list(quotes)
                    # FIX: an empty batch would leave `row` unbound (or make
                    # quotes[-1] raise IndexError) when building last_val below.
                    if not quotes:
                        continue
                    # Don't try to insert into DB if already uploaded
                    if key["id"] in already_uploaded:
                        row = quotes[-1]
                    else:
                        for row in quotes:
                            try:
                                quote = MarkitQuoteKind[asset_class].from_markit_line(
                                    row
                                )
                            except ValueError as e:
                                MarkitQuoteKind[asset_class].clear()
                                # FIX: original referenced an undefined `msg_id`
                                # here, raising NameError instead of logging.
                                logger.error("Couldn't parse %s: %s", key["id"], e)
                                continue
                            else:
                                quote.stage()
                                quote.commit()
                    # The after is specific so that we can avoid skipping any quotes
                    # We would also get stuck sometimes without the quoteid being specified
                    last_val = (
                        f"{row['receiveddatetime']},{asset_class}-9480-{row['quoteid']}"
                    )
                    if after == last_val:
                        # NOTE(review): this only exits the page loop; overall
                        # termination relies on get_data returning falsy on the
                        # next iteration — confirm against the API contract.
                        break
                    else:
                        after = last_val
            else:
                break
        except JSONDecodeError:
            logger.error("Issue with %s: %s", asset_class, after)
        except AttributeError:
            # Presumably raised when the API client's key has expired/been
            # invalidated — refresh and retry. TODO confirm with MarkitAPI.
            MarkitAPI.update_api_key()


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "start_from",
        type=datetime.date.fromisoformat,
        default=datetime.date.today(),
        nargs="?",
    )
    args = parser.parse_args()
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Cursor starts at midnight of the day AFTER start_from, in epoch ms.
        start_after = _epoch_millis(args.start_from + datetime.timedelta(days=1))
        futures = {
            executor.submit(process_asset_class, asset_class, start_after): asset_class
            for asset_class in ["ABS", "CD", "TRS"]
        }
        concurrent.futures.wait(futures)
        # FIX: wait() swallows worker exceptions — surface them so a failed
        # asset class is not silently dropped.
        for future, asset_class in futures.items():
            if exc := future.exception():
                logger.error("process_asset_class(%r) failed: %s", asset_class, exc)