finalized analyzer

added get_lines_count function to count lines efficiently
changed the internal backend of comparing file sizes
added saving to json
This commit is contained in:
Mark 2022-09-22 11:19:27 +03:00
parent be0ec953aa
commit 05e1a44353

View File

@ -1,5 +1,17 @@
import pathlib
import market_trade.constants
import json
def get_lines_count(filepath) -> int:
    """
    Count the number of lines in a text file (default newline delimiter).

    :param filepath: path-like object (pathlib.Path preferred) pointing to the file
    :return: integer count of the lines in the file
    """
    # Iterate the file object lazily so memory use stays constant
    # regardless of file size; the context manager guarantees the
    # handle is closed even if decoding raises.
    with open(filepath, mode="r", encoding="utf-8") as counted_file:
        return sum(1 for _ in counted_file)
if __name__ == '__main__':
shares_maxsize_by_figi = {}
@ -12,12 +24,17 @@ if __name__ == '__main__':
# compiling all the shares files to list
shares_filepaths = list(figi_dir.iterdir())
#TODO: REDO WITH LINES COUNTING
# getting maximum file by line count
maxsize_shares_filepath = max(shares_filepaths, key=get_lines_count)
# getting maximum file by file size
maxsize_shares_filepath = max(shares_filepaths, key=lambda path: path.stat().st_size)
# setting maximum file value
shares_maxsize_by_figi[figi_dir.name] = get_lines_count(maxsize_shares_filepath)
shares_maxsize_by_figi[figi_dir.name] = maxsize_shares_filepath.stat().st_size
shares_sorted_by_liquidity = sorted(shares_maxsize_by_figi.items(),
# sorting the liquidity stats by the size
shares_sorted_by_liquidity = dict(sorted(shares_maxsize_by_figi.items(),
key=lambda dict_item: dict_item[1],
reverse=True)
reverse=True))
# saving them as a json stats file
with open(market_trade.constants.SHARES_STATS_PATH, mode="w", encoding="utf-8") as stats_file:
json.dump(shares_sorted_by_liquidity, stats_file, ensure_ascii=False)