Mirror of https://github.com/tinygrad/tinygrad.git (synced 2026-01-10 15:38:29 -05:00)
Fix llama json loading (#2160)
@@ -256,7 +256,7 @@ def concat_weights(models):
 def load(fn:str):
   if fn.endswith('.index.json'):
     with open(fn) as fp: weight_map = json.load(fp)['weight_map']
-    parts = {n: load(Path(fn).parent / Path(n).name) for n in set(weight_map.values())}
+    parts = {n: load(str(Path(fn).parent / Path(n).name)) for n in set(weight_map.values())}
     return {k: parts[n][k] for k, n in weight_map.items()}
   elif fn.endswith(".safetensors"):
     return safe_load(fn)
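What the one-line change fixes: load() is called recursively on each shard file listed in the .index.json weight map, and it dispatches on fn.endswith(...), which is a str method. Before the fix, the recursive call received a pathlib.Path, which has no endswith attribute. A minimal sketch of that failure mode follows (standard library only; the shard filename is hypothetical, not taken from the commit):

from pathlib import Path

# Sketch of the bug the str(...) wrap fixes: Path objects do not expose endswith.
p = Path("weights") / Path("model-00001-of-00002.safetensors").name  # hypothetical shard name
try:
  p.endswith(".safetensors")            # Path has no .endswith
except AttributeError as e:
  print(e)                              # 'PosixPath' object has no attribute 'endswith'
print(str(p).endswith(".safetensors"))  # True, hence the str(...) wrap in the diff above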