A simple algorithm: for every FIGI it opens the trades files, checks which one is the largest, and produces a statistics file used to determine which FIGI has the maximum liquidity.
24 lines · 886 B · Python
import pathlib

import market_trade.constants


if __name__ == '__main__':
    shares_maxsize_by_figi = {}

    for figi_dir in market_trade.constants.SHARES_TRADES_PATH.iterdir():
        # Only descend into directories: stray files may be present next to
        # the per-FIGI directories.
        if figi_dir.is_dir():
            # Collect all of this FIGI's trades files.
            shares_filepaths = list(figi_dir.iterdir())

            # TODO: redo with line counting.

            # Pick the largest file by size.
            maxsize_shares_filepath = max(shares_filepaths,
                                          key=lambda path: path.stat().st_size)

            shares_maxsize_by_figi[figi_dir.name] = maxsize_shares_filepath.stat().st_size

    # Rank FIGIs from most to least liquid, using the largest file size as the proxy.
    shares_sorted_by_liquidity = sorted(shares_maxsize_by_figi.items(),
                                        key=lambda dict_item: dict_item[1],
                                        reverse=True)
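The TODO in the loop hints at ranking FIGIs by the number of trade records rather than by raw file size. A minimal sketch of that variant, assuming one trade record per line in each file and reusing the same market_trade.constants.SHARES_TRADES_PATH layout (the helper name count_trade_lines is hypothetical):

import market_trade.constants


def count_trade_lines(path):
    # Count trade records by counting lines; assumes one trade per line.
    with path.open('rb') as trades_file:
        return sum(1 for _ in trades_file)


if __name__ == '__main__':
    max_lines_by_figi = {}

    for figi_dir in market_trade.constants.SHARES_TRADES_PATH.iterdir():
        if figi_dir.is_dir():
            # The busiest file (most trade lines) is the liquidity proxy.
            max_lines_by_figi[figi_dir.name] = max(
                count_trade_lines(path) for path in figi_dir.iterdir()
            )

    shares_sorted_by_liquidity = sorted(max_lines_by_figi.items(),
                                        key=lambda item: item[1],
                                        reverse=True)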
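The description at the top mentions producing a statistics file, but the script stops at the in-memory sorted list. One way the result might be saved, using a hypothetical helper save_liquidity_stats and a hypothetical output path liquidity_stats.json (neither appears in the original):

import json


def save_liquidity_stats(sorted_items, path='liquidity_stats.json'):
    # Persist the (figi, size) ranking so other scripts can pick the most
    # liquid FIGI without rescanning the trades directories.
    with open(path, 'w') as stats_file:
        json.dump(sorted_items, stats_file, indent=2)

Calling save_liquidity_stats(shares_sorted_by_liquidity) after the sort would then produce the statistics file the description refers to.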