diff --git a/sz.py b/sz.py
index 0d7e5be273..324a8e9935 100755
--- a/sz.py
+++ b/sz.py
@@ -4,6 +4,8 @@ import token
 import tokenize
 import itertools
 from tabulate import tabulate
+from tinygrad.uop import Ops
+from tinygrad.helpers import ContextVar
 
 TOKEN_WHITELIST = [token.OP, token.NAME, token.NUMBER, token.STRING]
 
@@ -79,11 +81,15 @@ if __name__ == "__main__":
   print(tabulate([headers] + sorted(table, key=lambda x: -x[1]), headers="firstrow", floatfmt=".1f")+"\n")
   groups = sorted([('/'.join(x[0].rsplit("/", 1)[0].split("/")[0:2]), x[1], x[2]) for x in table])
   dir_sizes = {}
-  for dir_name, group in itertools.groupby(groups, key=lambda x:x[0]):
+  for dir_name, _group in itertools.groupby(groups, key=lambda x:x[0]):
+    group = list(_group)
     dir_sizes[dir_name] = sum([x[1] for x in group])
-    print(f"{dir_name:30s} : {dir_sizes[dir_name]:6d}")
-  print(f"\n core line count: {sum([v for k,v in dir_sizes.items() if k not in NONCORE_DIRS])}")
+    print(f"{dir_name:30s} : {dir_sizes[dir_name]:6d} in {len(group):2d} files")
+  print()
+  print(f" ops: {len(Ops)}")
+  print(f" flags: {len(ContextVar._cache)}")
+  print(f" core lines: {sum([v for k,v in dir_sizes.items() if k not in NONCORE_DIRS])}")
   total_lines = sum([x[1] for x in table])
-  print(f"total line count: {total_lines}")
+  print(f"total lines: {total_lines}")
   max_line_count = int(os.getenv("MAX_LINE_COUNT", "-1"))
   assert max_line_count == -1 or total_lines <= max_line_count, f"OVER {max_line_count} LINES"
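
Note on the groupby change: itertools.groupby yields one-shot group iterators, so the old code could only consume each group once (for the line-count sum). Materializing the group with list(_group) lets the loop compute both the line total and the new per-directory file count. A minimal standalone sketch of the pattern, using made-up data rather than the real sz.py tables:

import itertools

# hypothetical (path, line_count) pairs, already sorted by directory key
rows = [("dir_a", 10), ("dir_a", 20), ("dir_b", 5)]

for key, _group in itertools.groupby(rows, key=lambda x: x[0]):
  group = list(_group)              # snapshot: the group iterator can only be walked once
  total = sum(x[1] for x in group)  # first pass over the snapshot
  print(f"{key}: {total} lines in {len(group)} files")  # second use of the same data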