Fix llama json loading (#2160)

will
2023-10-27 13:21:56 -07:00
committed by GitHub
parent 8d41b3eb3f
commit bc0829b677

@@ -256,7 +256,7 @@ def concat_weights(models):
 def load(fn:str):
   if fn.endswith('.index.json'):
     with open(fn) as fp: weight_map = json.load(fp)['weight_map']
-    parts = {n: load(Path(fn).parent / Path(n).name) for n in set(weight_map.values())}
+    parts = {n: load(str(Path(fn).parent / Path(n).name)) for n in set(weight_map.values())}
     return {k: parts[n][k] for k, n in weight_map.items()}
   elif fn.endswith(".safetensors"):
     return safe_load(fn)
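
Context for the change above: `Path(fn).parent / Path(n).name` produces a `pathlib.Path`, and `Path` objects do not have str methods such as `endswith`, so the recursive `load(...)` call would fail on the `fn.endswith(...)` checks. Wrapping the argument in `str()` keeps `load` working on plain strings. A minimal sketch of the failure mode, with a hypothetical filename and assuming standard `pathlib` behavior:

    from pathlib import Path

    fn = "model.safetensors"             # hypothetical filename, not from the diff
    p = Path(fn).parent / Path(fn).name  # a Path object, not a str

    fn.endswith(".safetensors")          # True: str method works on the string
    # p.endswith(".safetensors")         # AttributeError: 'PosixPath' object has no attribute 'endswith'
    str(p).endswith(".safetensors")      # True: matches the str() wrapping in the fix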