# Mirror of https://github.com/ROCm/ROCm.git
# Synced 2026-01-09 22:58:17 -05:00
# Changelog: update conf and toc.yml.in; archive previous versions (archive
# data files, update anchors); primus pytorch: remove training batch size
# args; update primus megatron run cmds (multi-node); update primus pytorch;
# update docker tag
# File stats at sync time: 59 lines, 2.1 KiB, YAML
# Primus training docker image and supported model matrix for the ROCm docs.
docker:
  # Image tag users are told to pull.
  pull_tag: rocm/primus:v25.11
  # NOTE(review): the hub URL below points at a v25.10 image layer while
  # pull_tag is v25.11 — confirm the digest link matches the published tag.
  docker_hub_url: https://hub.docker.com/layers/rocm/primus/v25.10/images/sha256-140c37cd2eeeb183759b9622543fc03cc210dc97cbfa18eeefdcbda84420c197
  # Component versions shipped inside the image above.
  # NOTE(review): nesting of `components` under `docker` is inferred from the
  # flattened source — verify against the template that consumes this file.
  components:
    ROCm: 7.1.0
    PyTorch: 2.10.0.dev20251112+rocm7.1
    Python: "3.10"  # quoted: bare 3.10 would parse as the float 3.1
    Transformer Engine: 2.4.0.dev0+32e2d1d4
    Flash Attention: 2.8.3
    hipBLASLt: 1.2.0-09ab7153e2
    Triton: 3.4.0
    RCCL: 2.27.7

# Model families grouped by vendor. Per model: `mad_tag` is the MAD benchmark
# identifier and `config_name` the Primus pretrain config file name.
model_groups:
  - group: Meta Llama
    tag: llama
    models:
      - model: Llama 3.3 70B
        mad_tag: primus_pyt_megatron_lm_train_llama-3.3-70b
        config_name: llama3.3_70B-pretrain.yaml
      - model: Llama 3.1 70B
        mad_tag: primus_pyt_megatron_lm_train_llama-3.1-70b
        config_name: llama3.1_70B-pretrain.yaml
      - model: Llama 3.1 8B
        mad_tag: primus_pyt_megatron_lm_train_llama-3.1-8b
        config_name: llama3.1_8B-pretrain.yaml
      - model: Llama 2 7B
        mad_tag: primus_pyt_megatron_lm_train_llama-2-7b
        config_name: llama2_7B-pretrain.yaml
      - model: Llama 2 70B
        mad_tag: primus_pyt_megatron_lm_train_llama-2-70b
        config_name: llama2_70B-pretrain.yaml
  - group: DeepSeek
    tag: deepseek
    models:
      - model: DeepSeek-V3 (proxy)
        mad_tag: primus_pyt_megatron_lm_train_deepseek-v3-proxy
        config_name: deepseek_v3-pretrain.yaml
      - model: DeepSeek-V2-Lite
        mad_tag: primus_pyt_megatron_lm_train_deepseek-v2-lite-16b
        config_name: deepseek_v2_lite-pretrain.yaml
  - group: Mistral AI
    tag: mistral
    models:
      - model: Mixtral 8x7B
        mad_tag: primus_pyt_megatron_lm_train_mixtral-8x7b
        config_name: mixtral_8x7B_v0.1-pretrain.yaml
      - model: Mixtral 8x22B (proxy)
        mad_tag: primus_pyt_megatron_lm_train_mixtral-8x22b-proxy
        config_name: mixtral_8x22B_v0.1-pretrain.yaml
  - group: Qwen
    tag: qwen
    models:
      - model: Qwen 2.5 7B
        mad_tag: primus_pyt_megatron_lm_train_qwen2.5-7b
        config_name: primus_qwen2.5_7B-pretrain.yaml
      - model: Qwen 2.5 72B
        mad_tag: primus_pyt_megatron_lm_train_qwen2.5-72b
        config_name: qwen2.5_72B-pretrain.yaml