envvar BERT_LAYERS [pr] (#8709)

The default is 24, matching BERT-large.
This commit is contained in:
chenyu
2025-01-21 22:49:19 -05:00
committed by GitHub
parent 9f6d545a16
commit 9a9079118e

View File

@@ -203,7 +203,7 @@ def get_mlperf_bert_config():
"intermediate_size": 4096,
"max_position_embeddings": 512,
"num_attention_heads": 16,
"num_hidden_layers": 24,
"num_hidden_layers": getenv("BERT_LAYERS", 24),
"type_vocab_size": 2,
"vocab_size": 30522
}