# This is the new version of Slab that handles multiple backend providers.

[backend.aws.m7i-cpu-bench]
region = "eu-west-1"
image_id = "ami-002bdcd64b8472cf9" # Based on Ubuntu 22.04
instance_type = "m7i.48xlarge"
security_group = ["sg-0e55cc31dfda0d8a7", ]

[profile.m7i-cpu-bench]
region = "eu-west-1"
image_id = "ami-002bdcd64b8472cf9" # Based on Ubuntu 22.04
instance_type = "m7i.48xlarge"
security_group = ["sg-0e55cc31dfda0d8a7", ]

[profile.m7i-cpu-test]
region = "eu-west-1"
image_id = "ami-002bdcd64b8472cf9"
instance_type = "m7i.16xlarge"
security_group = ["sg-0e55cc31dfda0d8a7", ]

[profile.m7i-metal]
region = "eu-west-1"
image_id = "ami-002bdcd64b8472cf9"
instance_type = "m7i.metal-24xl"
security_group = ["sg-0e55cc31dfda0d8a7", ]

[profile.gpu-bench]
region = "us-east-1"
image_id = "ami-08e27480d79e82238"
instance_type = "p3.2xlarge"
subnet_id = "subnet-8123c9e7"
security_group = ["sg-017afab1f328af917", ]

# Docker is properly configured for running tests inside Docker in this AMI
[profile.gpu-test]
region = "us-east-1"
image_id = "ami-0257c6ad39f902b5e"
instance_type = "p3.2xlarge"
subnet_id = "subnet-8123c9e7"
security_group = ["sg-017afab1f328af917", ]

# It has the CUDA driver (<= 12.5) and Docker installed
[profile.gpu-test-ubuntu22]
region = "us-east-1"
image_id = "ami-05385e0c3c574621f"
instance_type = "p3.2xlarge"
subnet_id = "subnet-8123c9e7"
security_group = ["sg-017afab1f328af917", ]

[profile.slurm-cluster]
region = "eu-west-3"
image_id = "ami-0bb5bb9cb747b5ddd"
instance_id = "i-0e5ae2a14134d6275"
instance_type = "m6i.8xlarge"
security_group = ["sg-02dd8470fa845f31b", ]

#################################################
# Compiler commands
#################################################

[command.compiler-cpu-build]
workflow = "compiler_build_and_test_cpu.yml"
profile = "m7i-cpu-test"
check_run_name = "Compiler Build and Test (CPU)"

[command.compiler-cpu-build-distributed]
workflow = "compiler_build_and_test_cpu_distributed.yml"
profile = "slurm-cluster"
check_run_name = "Compiler Distributed Build and Test (CPU)"
runner_name = "distributed-ci"

[command.compiler-gpu-build]
workflow = "compiler_build_and_test_gpu.yml"
profile = "gpu-test"
check_run_name = "Compiler Build and Test (GPU)"

[command.compiler-cpu-benchmark]
workflow = "compiler_benchmark.yml"
profile = "m7i-cpu-bench"
check_run_name = "Compiler Performances Benchmarks (CPU)"

[command.compiler-gpu-benchmark]
workflow = "compiler_benchmark.yml"
profile = "gpu-bench"
check_run_name = "Compiler Performances Benchmarks (GPU)"

# Trigger Docker images build
[command.compiler-publish-docker-images]
workflow = "compiler_publish_docker_images.yml"
profile = "m7i-cpu-test"
check_run_name = "Compiler - Docker images build & publish"

# Trigger ML benchmarks by running each use-case subset in parallel.
[command.ml-bench]
workflow = "ml_benchmark_subset.yml"
profile = "m7i-cpu-bench"
matrix = [0,1,2,3,4,5,6,7,8,9,10] # indices of the use-case subsets
max_parallel_jobs = 2

# Trigger ML tests with the latest Concrete Python (CP)
[command.ml-test]
workflow = "concrete_ml_tests.yml"
profile = "m7i-cpu-test"
check_run_name = "Concrete ML Tests"

#################################################
# Concrete Python Commands
#################################################

[command.concrete-python-tests-linux]
workflow = "concrete_python_tests_linux.yml"
profile = "m7i-cpu-test"
check_run_name = "Concrete Python Tests (Linux)"

[command.concrete-python-tests-linux-gpu]
workflow = "concrete_python_tests_linux.yml"
profile = "gpu-test"
check_run_name = "Concrete Python Tests (Linux Gpu)"

#################################################
# Release Commands
#################################################

[command.concrete-python-release]
workflow = "concrete_python_release.yml"
profile = "m7i-cpu-test"
check_run_name = "Concrete Python Release"

[command.concrete-python-release-gpu]
workflow = "concrete_python_release_gpu.yml"
profile = "m7i-cpu-test"
check_run_name = "Concrete Python Release (GPU)"

[command.concrete-python-test-gpu-wheel]
workflow = "concrete_python_test_gpu_wheel.yml"
profile = "gpu-test"
check_run_name = "Concrete Python Test GPU Wheel"
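
# A command entry wires a workflow file to one of the [profile.*] sections defined
# above. The sketch below is a hypothetical example, not part of this CI: the
# "my-new-check" name, the workflow file, and the check run name are placeholders
# that only illustrate the expected keys.
#
# [command.my-new-check]
# workflow = "my_new_check.yml"       # workflow file to trigger
# profile = "m7i-cpu-test"            # references one of the [profile.*] sections above
# check_run_name = "My New Check"     # name reported for the resulting check run
#
# Matrix-style commands can omit check_run_name and use matrix / max_parallel_jobs
# instead, as in [command.ml-bench] above.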