name: Create Release

on:
  workflow_dispatch:
    inputs:
      release_notes:
        description: "Release Notes"
        required: true
        type: string
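
# The release job below creates a GitHub release and uploads assets, which requires
# write access to repository contents.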
permissions:
  contents: write

jobs:
  build_wheels:
    name: Build wheels for ${{ matrix.arch }} (HA ${{ matrix.home_assistant_image }})
    runs-on: ubuntu-latest
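    # Build one wheel per CPU architecture that Home Assistant publishes container images for,
    # inside the matching image so the wheel matches its Python and C library.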
    strategy:
      fail-fast: false
      matrix:
        include:
          # ARM64
          - home_assistant_image: "aarch64-homeassistant:2025.4.1"
            arch: "aarch64"

          # 32-bit ARM (Raspberry Pis)
          - home_assistant_image: "armhf-homeassistant:2025.4.1"
            arch: "armhf"

          # x64
          - home_assistant_image: "amd64-homeassistant:2025.4.1"
            arch: "x86_64"

          # 32-bit for older processors
          - home_assistant_image: "i386-homeassistant:2025.4.1"
            arch: "i386"

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
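
      # Sanity check when building for a version tag: the tag, manifest.json, and const.py
      # must all report the same integration version before anything is published.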
      - name: Verify version match
        if: startsWith(github.event.ref, 'refs/tags/v')
        run: |
          tag_version=$(echo ${{ github.ref }} | sed 's/refs\/tags\/v//')
          component_version_manifest=$(jq -r '.version' custom_components/llama_conversation/manifest.json)
          component_version_const=$(cat custom_components/llama_conversation/const.py | grep "INTEGRATION_VERSION" | tr -d ' ' | tr -d '"' | tr -d 'INTEGRATION_VERSION=')

          if [ "$tag_version" != "$component_version_manifest" ]; then
            echo "The version in the GitHub tag ($tag_version) does not match the version in the Home Assistant custom component manifest ($component_version_manifest)!"
            exit 1
          fi

          if [ "$tag_version" != "$component_version_const" ]; then
            echo "The version in the GitHub tag ($tag_version) does not match the version in const.py ($component_version_const)!"
            exit 1
          fi

          echo "All required versions match."
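
      # grep emits the literal "EMBEDDED_LLAMA_CPP_PYTHON_VERSION=<x.y.z>" line into $GITHUB_ENV,
      # which makes it available to later steps as env.EMBEDDED_LLAMA_CPP_PYTHON_VERSION.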
      - name: Read llama-cpp-python version
        run: cat custom_components/llama_conversation/const.py | grep "EMBEDDED_LLAMA_CPP_PYTHON_VERSION" | tr -d ' ' | tr -d '"' >> $GITHUB_ENV
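
      # run-on-arch-action is used here purely as a container runner: with arch/distro set to
      # "none" it runs the commands inside the given base_image (the per-architecture
      # Home Assistant image), emulating non-native targets with QEMU where needed.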
      - name: Build artifact
        uses: uraimo/run-on-arch-action@v2
        id: build
        with:
          arch: none
          distro: none
          base_image: homeassistant/${{ matrix.home_assistant_image }}

          # Create an artifacts directory
          setup: |
            mkdir -p "${PWD}/artifacts"

          # Mount the artifacts directory as /artifacts in the container
          dockerRunArgs: |
            --volume "${PWD}/artifacts:/artifacts"

          # The shell to run commands with in the container
          shell: /bin/bash

          # Produce a binary artifact and place it in the mounted volume
          run: |
            apk update
            apk add build-base python3-dev cmake
            pip3 install build

            cd /tmp
            git clone --quiet --recurse-submodules https://github.com/abetlen/llama-cpp-python --branch "v${{ env.EMBEDDED_LLAMA_CPP_PYTHON_VERSION }}"
            cd llama-cpp-python
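
            # Tag the wheel with a "+homellm" local version suffix so it is distinguishable
            # from the upstream llama-cpp-python release of the same version.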
tag="homellm"
|
|
sed -i -E "s/^(__version__ *= *\"[0-9]+\.[0-9]+\.[0-9]+)\"/\1+${tag}\"/" llama_cpp/__init__.py
|
|
|
|
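
            # Portable CPU build: skip -march=native tuning and compile the CPU variants as
            # dynamically loadable ggml backends, so one wheel per architecture should run on
            # any CPU of that architecture; LLaVA (multimodal) support is left out.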
            export CMAKE_ARGS="-DLLAVA_BUILD=OFF -DGGML_NATIVE=OFF -DGGML_CPU_ALL_VARIANTS=ON -DGGML_BACKEND_DL=ON"
            python3 -m build --wheel

            mv ./dist/*.whl /artifacts
            ls -la /artifacts/
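
      # Each matrix job uploads its wheel under a per-architecture artifact name.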
      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          path: ./artifacts/*.whl
          name: artifact_${{ matrix.arch }}
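
  # Runs only for version tags, after every per-architecture wheel has been built.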
  release:
    name: Create Release
    needs: [ build_wheels ]
    runs-on: ubuntu-latest
    if: startsWith(github.event.ref, 'refs/tags/v')

    steps:
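      # merge-multiple collects the per-architecture artifacts into a single dist/ directory.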
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: dist
          merge-multiple: true
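
      # Publish the downloaded wheels as release assets, using the notes supplied to
      # workflow_dispatch, and mark the new release as the latest.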
      - name: Create GitHub release
        uses: softprops/action-gh-release@v2
        with:
          files: dist/*
          body: ${{ inputs.release_notes }}
          make_latest: true