fixed a bug where lm would crash with certain smaller models

This commit is contained in:
Elias Joseph
2023-03-23 07:36:04 +00:00
parent 2f891a6c23
commit 6c5705cf09

@@ -324,7 +324,7 @@ if __name__ == "__main__":
     mlir_str = bytes(mlir_str, "utf-8")
-    if config["n_embed"] == 14336:
+    if "n_embed" in config.keys() and config["n_embed"] == 14336:
         def get_state_dict():
             d = torch.load(
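
The change guards the n_embed lookup: smaller models' configuration files may not define that key at all, so the old unconditional config["n_embed"] access raised a KeyError and crashed the script. Below is a minimal sketch of the guarded check with hypothetical config contents (the real values come from the model's config, not from this commit):

# Hypothetical configs; smaller models may omit "n_embed" entirely.
config_large = {"n_embed": 14336, "n_layers": 96}
config_small = {"n_layers": 24}

def is_large_model(config):
    # Membership test first, so a missing key yields False instead of KeyError.
    return "n_embed" in config.keys() and config["n_embed"] == 14336

print(is_large_model(config_large))  # True
print(is_large_model(config_small))  # False, no crash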