(enh) add caching@v4 action in workflows (#3780)

* dummy test change

* regen yml: 1st install python 3.11, then poetry

* fix caching for poetry; old entry for python was rather useless

* fix steps order (cache before poetry)

* add poetry caching to ghcr_runtime; fix fork conditions

* ghcr_runtime: more caching actions; condition fixes

* fix interim action error (order of steps)

* cache@v4 instead of v3

* fixed interim typo for 2 fork conditions

* runtime/test_env_vars: compacted multiple tests into one to reduce time

* ugh if fork condition changes again
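
In short, every touched workflow ends up with the same pattern: restore the Poetry caches with actions/cache@v4 before Poetry itself is installed and dependencies are resolved. A condensed sketch of that step order, taken from the workflow diffs below (the cache step sits just before or just after "Set up Python" depending on the job, but always runs before Poetry is installed):

  - name: Set up Python
    uses: actions/setup-python@v5
    with:
      python-version: '3.11'
  - name: Cache Poetry dependencies
    uses: actions/cache@v4
    with:
      path: |
        ~/.cache/pypoetry
        ~/.virtualenvs
      key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
      restore-keys: |
        ${{ runner.os }}-poetry-
  - name: Install poetry via pipx
    run: pipx install poetry
  - name: Install Python dependencies using Poetry
    run: make install-python-dependencies
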
Author: tobitege
Date:   2024-09-09 10:49:49 +02:00
Committed by: GitHub
Parent: dadada18ce
Commit: 2b7517e542

5 changed files with 92 additions and 92 deletions

==== changed file 1 of 5 ====

@@ -38,10 +38,6 @@ jobs:
         base_image:
           - image: 'nikolaik/python-nodejs:python3.11-nodejs22'
             tag: nikolaik
-          - image: 'python:3.11-bookworm'
-            tag: python
-          - image: 'node:22-bookworm'
-            tag: node
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -70,31 +66,39 @@ jobs:
       - name: Set up Docker Buildx
         id: buildx
         uses: docker/setup-buildx-action@v3
-      - name: Install poetry via pipx
-        run: pipx install poetry
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
           python-version: '3.11'
-          cache: 'poetry'
+      - name: Cache Poetry dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/pypoetry
+            ~/.virtualenvs
+          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-poetry-
+      - name: Install poetry via pipx
+        run: pipx install poetry
       - name: Install Python dependencies using Poetry
         run: make install-python-dependencies
       - name: Create source distribution and Dockerfile
         run: poetry run python3 openhands/runtime/utils/runtime_build.py --base_image ${{ matrix.base_image.image }} --build_folder containers/runtime --force_rebuild
       - name: Build and push runtime image ${{ matrix.base_image.image }}
-        if: "!github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork != true
         run: |
           ./containers/build.sh runtime ${{ github.repository_owner }} --push ${{ matrix.base_image.tag }}
       # Forked repos can't push to GHCR, so we need to upload the image as an artifact
       - name: Build runtime image ${{ matrix.base_image.image }} for fork
-        if: "github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork
         uses: docker/build-push-action@v6
         with:
           tags: ghcr.io/all-hands-ai/runtime:${{ github.sha }}-${{ matrix.base_image.tag }}
           outputs: type=docker,dest=/tmp/runtime-${{ matrix.base_image.tag }}.tar
           context: containers/runtime
       - name: Upload runtime image for fork
-        if: "github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork != true
        uses: actions/upload-artifact@v4
        with:
          name: runtime-${{ matrix.base_image.tag }}
@@ -107,7 +111,7 @@ jobs:
     needs: [ghcr_build_runtime]
     strategy:
       matrix:
-        base_image: ['nikolaik', 'python', 'node']
+        base_image: ['nikolaik']
     steps:
       - uses: actions/checkout@v4
       - name: Free Disk Space (Ubuntu)
@@ -121,22 +125,30 @@ jobs:
           swap-storage: true
       # Forked repos can't push to GHCR, so we need to download the image as an artifact
       - name: Download runtime image for fork
-        if: "github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork
         uses: actions/download-artifact@v4
         with:
           name: runtime-${{ matrix.base_image }}
           path: /tmp
       - name: Load runtime image for fork
-        if: "github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork
         run: |
           docker load --input /tmp/runtime-${{ matrix.base_image }}.tar
-      - name: Install poetry via pipx
-        run: pipx install poetry
+      - name: Cache Poetry dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/pypoetry
+            ~/.virtualenvs
+          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-poetry-
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
           python-version: '3.11'
-          cache: 'poetry'
+      - name: Install poetry via pipx
+        run: pipx install poetry
       - name: Install Python dependencies using Poetry
         run: make install-python-dependencies
       - name: Run runtime tests
@@ -162,27 +174,35 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        base_image: ['nikolaik', 'python', 'node']
+        base_image: ['nikolaik']
     steps:
       - uses: actions/checkout@v4
       # Forked repos can't push to GHCR, so we need to download the image as an artifact
       - name: Download runtime image for fork
-        if: "github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork
         uses: actions/download-artifact@v4
         with:
           name: runtime-${{ matrix.base_image }}
           path: /tmp
       - name: Load runtime image for fork
-        if: "github.event.pull_request.head.repo.fork"
+        if: github.event.pull_request.head.repo.fork
         run: |
           docker load --input /tmp/runtime-${{ matrix.base_image }}.tar
-      - name: Install poetry via pipx
-        run: pipx install poetry
+      - name: Cache Poetry dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/pypoetry
+            ~/.virtualenvs
+          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-poetry-
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
           python-version: '3.11'
-          cache: 'poetry'
+      - name: Install poetry via pipx
+        run: pipx install poetry
       - name: Install Python dependencies using Poetry
         run: make install-python-dependencies
       - name: Run integration tests

==== changed file 2 of 5 ====

@@ -22,13 +22,21 @@ jobs:
         python-version: ['3.11']
     steps:
       - uses: actions/checkout@v4
-      - name: Install poetry via pipx
-        run: pipx install poetry
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-          cache: 'poetry'
+      - name: Cache Poetry dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/pypoetry
+            ~/.virtualenvs
+          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-poetry-
+      - name: Install poetry via pipx
+        run: pipx install poetry
       - name: Install Python dependencies using Poetry
         run: poetry install --without evaluation,llama-index
       - name: Install & Start Docker

==== changed file 3 of 5 ====

@@ -29,18 +29,25 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
-      - name: Install poetry via pipx
-        run: pipx install poetry
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
           python-version: "3.11"
-          cache: 'poetry'
+      - name: Cache Poetry dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/pypoetry
+            ~/.virtualenvs
+          key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-poetry-
+      - name: Install poetry via pipx
+        run: pipx install poetry
       - name: Install Python dependencies using Poetry
-        run: poetry install --without evaluation,llama-index
+        run: make install-python-dependencies
       - name: Build Environment
         run: make build
       - name: Regenerate integration tests
         run: |
           DEBUG=${{ inputs.debug }} \
@@ -48,7 +55,6 @@ jobs:
           FORCE_REGENERATE_TESTS=${{ inputs.force_regenerate_tests }} \
           FORCE_USE_LLM=${{ inputs.force_use_llm }} \
           ./tests/integration/regenerate.sh
-
       - name: Commit changes
         run: |
           if git diff --quiet --exit-code; then

==== changed file 4 of 5 ====

@@ -165,7 +165,7 @@ class EventStreamRuntime(Runtime):
             return docker.from_env()
         except Exception as ex:
             logger.error(
-                'Launch docker client failed. Please make sure you have installed docker and started the docker daemon.'
+                'Launch docker client failed. Please make sure you have installed docker and started docker desktop/daemon.'
             )
             raise ex

==== changed file 5 of 5 ====

@@ -34,71 +34,37 @@ def test_env_vars_os_environ(temp_dir, box_class, run_as_openhands):
     time.sleep(1)


-def test_env_vars_runtime_add_env_vars(temp_dir, box_class):
+def test_env_vars_runtime_operations(temp_dir, box_class):
     runtime = _load_runtime(temp_dir, box_class)
+
+    # Test adding single env var
     runtime.add_env_vars({'QUUX': 'abc"def'})
-
-    obs: CmdOutputObservation = runtime.run_action(CmdRunAction(command='echo $QUUX'))
-    print(obs)
-    assert obs.exit_code == 0, 'The exit code should be 0.'
+    obs = runtime.run_action(CmdRunAction(command='echo $QUUX'))
     assert (
-        obs.content.strip().split('\r\n')[0].strip() == 'abc"def'
-    ), f'Output: [{obs.content}] for {box_class}'
-
-    runtime.close()
-    time.sleep(1)
-
-
-def test_env_vars_runtime_add_empty_dict(temp_dir, box_class):
-    runtime = _load_runtime(temp_dir, box_class)
-
-    prev_obs = runtime.run_action(CmdRunAction(command='env'))
-    assert prev_obs.exit_code == 0, 'The exit code should be 0.'
-    print(prev_obs)
-
-    runtime.add_env_vars({})
-
-    obs = runtime.run_action(CmdRunAction(command='env'))
-    assert obs.exit_code == 0, 'The exit code should be 0.'
-    print(obs)
-    assert (
-        obs.content == prev_obs.content
-    ), 'The env var content should be the same after adding an empty dict.'
-
-    runtime.close()
-    time.sleep(1)
-
-
-def test_env_vars_runtime_add_multiple_env_vars(temp_dir, box_class):
-    runtime = _load_runtime(temp_dir, box_class)
-    runtime.add_env_vars({'QUUX': 'abc"def', 'FOOBAR': 'xyz'})
-
-    obs: CmdOutputObservation = runtime.run_action(
-        CmdRunAction(command='echo $QUUX $FOOBAR')
+        obs.exit_code == 0 and obs.content.strip().split('\r\n')[0].strip() == 'abc"def'
     )
-    print(obs)
-    assert obs.exit_code == 0, 'The exit code should be 0.'
+
+    # Test adding multiple env vars
+    runtime.add_env_vars({'FOOBAR': 'xyz'})
+    obs = runtime.run_action(CmdRunAction(command='echo $QUUX $FOOBAR'))
     assert (
-        obs.content.strip().split('\r\n')[0].strip() == 'abc"def xyz'
-    ), f'Output: [{obs.content}] for {box_class}'
+        obs.exit_code == 0
+        and obs.content.strip().split('\r\n')[0].strip() == 'abc"def xyz'
+    )
+
+    # Test adding empty dict
+    prev_env = runtime.run_action(CmdRunAction(command='env')).content
+    runtime.add_env_vars({})
+    current_env = runtime.run_action(CmdRunAction(command='env')).content
+    assert prev_env == current_env
+
+    # Test overwriting env vars
+    runtime.add_env_vars({'QUUX': 'new_value'})
+    obs = runtime.run_action(CmdRunAction(command='echo $QUUX'))
+    assert (
+        obs.exit_code == 0
+        and obs.content.strip().split('\r\n')[0].strip() == 'new_value'
+    )

     runtime.close()
     time.sleep(1)
-
-
-def test_env_vars_runtime_add_env_vars_overwrite(temp_dir, box_class):
-    with patch.dict(os.environ, {'SANDBOX_ENV_FOOBAR': 'BAZ'}):
-        runtime = _load_runtime(temp_dir, box_class)
-        runtime.add_env_vars({'FOOBAR': 'xyz'})
-
-        obs: CmdOutputObservation = runtime.run_action(
-            CmdRunAction(command='echo $FOOBAR')
-        )
-        print(obs)
-        assert obs.exit_code == 0, 'The exit code should be 0.'
-        assert (
-            obs.content.strip().split('\r\n')[0].strip() == 'xyz'
-        ), f'Output: [{obs.content}] for {box_class}'
-
-        runtime.close()
-        time.sleep(1)