Split nightly workflow by backend (IREE / SHARK) (#313)

* Fix validation for nightly builds.

* Add an option to generate shark_tank inside the SHARK project.
Add a shark_arg for updating the tank on a mismatched hash (downloader).

* Fixup CI tank dir option.

* Fixup the work directory variable.
This commit is contained in:
Ean Garvey
2022-09-09 12:21:30 -05:00
committed by GitHub
parent bb52b224d0
commit 6aeda3670f
5 changed files with 61 additions and 40 deletions

View File

@@ -16,6 +16,7 @@ jobs:
fail-fast: false
matrix:
python-version: ["3.10"]
backend: [IREE, SHARK]
steps:
- uses: actions/checkout@v3
@@ -62,6 +63,7 @@ jobs:
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --exclude shark.venv,lit.cfg.py
- name: Build and validate the IREE package
if: ${{ matrix.backend == 'IREE' }}
run: |
cd $GITHUB_WORKSPACE
USE_IREE=1 VENV_DIR=iree.venv ./setup_venv.sh
@@ -73,18 +75,19 @@ jobs:
pip install ./wheelhouse/nodai*
# Validate the Models
/bin/bash "$GITHUB_WORKSPACE/build_tools/populate_sharktank_ci.sh"
pytest -k 'cpu' --ignore=benchmarks/tests/test_hf_benchmark.py --ignore=benchmarks/tests/test_benchmark.py --ignore=shark/tests/test_shark_importer.py --ignore=tank/tf/ |
tail -n 1 |
tee -a pytest_results.txt
pytest -k 'gpu' --ignore=benchmarks/tests/test_hf_benchmark.py --ignore=benchmarks/tests/test_benchmark.py --ignore=shark/tests/test_shark_importer.py --ignore=tank/tf/ |
tail -n 1 |
tee -a pytest_results.txt
pytest -k 'vulkan' --ignore=benchmarks/tests/test_hf_benchmark.py --ignore=benchmarks/tests/test_benchmark.py --ignore=shark/tests/test_shark_importer.py --ignore=tank/tf/ |
pytest tank |
tail -n 1 |
tee -a pytest_results.txt
if !(grep -Fxq " failed" pytest_results.txt)
then
export SHA=$(git log -1 --format='%h')
gsutil -m cp -r $GITHUB_WORKSPACE/gen_shark_tank/* gs://shark_tank/$SHA
gsutil -m cp -r gs://shark_tank/$SHA/* gs://shark_tank/latest/
fi
rm -rf ./wheelhouse/nodai*
- name: Build and validate the SHARK Runtime package
if: ${{ matrix.backend == 'SHARK' }}
run: |
cd $GITHUB_WORKSPACE
./setup_venv.sh
@@ -95,25 +98,12 @@ jobs:
# Install the built wheel
pip install ./wheelhouse/nodai*
# Validate the Models
pytest -k 'cpu' --ignore=benchmarks/tests/test_hf_benchmark.py --ignore=benchmarks/tests/test_benchmark.py --ignore=shark/tests/test_shark_importer.py --ignore=tank/tf/ |
pytest tank |
tail -n 1 |
tee -a pytest_results.txt
pytest -k 'gpu' --ignore=benchmarks/tests/test_hf_benchmark.py --ignore=benchmarks/tests/test_benchmark.py --ignore=shark/tests/test_shark_importer.py --ignore=tank/tf/ |
tail -n 1 |
tee -a pytest_results.txt
pytest -k 'vulkan' --ignore=benchmarks/tests/test_hf_benchmark.py --ignore=benchmarks/tests/test_benchmark.py --ignore=shark/tests/test_shark_importer.py --ignore=tank/tf/ |
tail -n 1 |
tee -a pytest_results.txt
if !(grep -Fxq " failed" pytest_results.txt)
then
export SHA=$(git log -1 --format='%h')
gsutil -m cp -r $GITHUB_WORKSPACE/gen_shark_tank/* gs://shark_tank/$SHA
gsutil -m cp -r gs://shark_tank/$SHA/* gs://shark_tank/latest/
fi
rm pytest_results.txt
- name: Upload Release Assets
if: ${{ matrix.backend == 'SHARK' }}
id: upload-release-assets
uses: dwenegar/upload-release-assets@v1
env:
@@ -123,6 +113,7 @@ jobs:
assets_path: ./wheelhouse/nodai_*.whl
- name: Publish Release
if: ${{ matrix.backend == 'SHARK' }}
id: publish_release
uses: eregon/publish-release@v1
env:

View File

@@ -2,4 +2,4 @@
IMPORTER=1 ./setup_venv.sh
source $GITHUB_WORKSPACE/shark.venv/bin/activate
python generate_sharktank.py --upload=False
python generate_sharktank.py --upload=False --ci_tank_dir=True

View File

@@ -13,6 +13,7 @@ import os
import csv
import argparse
from shark.shark_importer import SharkImporter
from shark.parser import shark_args
import tensorflow as tf
import subprocess as sp
import hashlib
@@ -29,10 +30,6 @@ except:
# Invalid device or cannot modify virtual devices once initialized.
pass
# All generated models and metadata will be saved under this directory.
home = str(Path.home())
WORKDIR = os.path.join(home, ".local/shark_tank/")
def create_hash(file_name):
with open(file_name, "rb") as f:
@@ -224,9 +221,21 @@ if __name__ == "__main__":
default="./tank/tflite/tflite_model_list.csv",
help="Contains the file with tf model name and args.",
)
parser.add_argument(
"--ci_tank_dir",
type=bool,
default=False,
)
parser.add_argument("--upload", type=bool, default=False)
args = parser.parse_args()
home = str(Path.home())
if args.ci_tank_dir == True:
WORKDIR = os.path.join(os.path.dirname(__file__), "gen_shark_tank")
else:
WORKDIR = os.path.join(home, ".local/shark_tank/")
if args.torch_model_csv:
save_torch_model(args.torch_model_csv)
@@ -239,6 +248,4 @@ if __name__ == "__main__":
if args.upload:
git_hash = sp.getoutput("git log -1 --format='%h'") + "/"
print("uploading files to gs://shark_tank/" + git_hash)
os.system(
"gsutil cp -r ~/.local/shark_tank/* gs://shark_tank/" + git_hash
)
os.system(f"gsutil cp -r {WORKDIR}* gs://shark_tank/" + git_hash)

View File

@@ -81,5 +81,11 @@ parser.add_argument(
default="latest",
help="gs://shark_tank/<this_flag>/model_directories",
)
parser.add_argument(
"--update_tank",
default=False,
action="store_true",
help="When enabled, SHARK downloader will update local shark_tank if local hash is different from latest upstream hash.",
)
shark_args, unknown = parser.parse_known_args()

View File

@@ -18,6 +18,7 @@ import urllib.request
import json
import hashlib
from pathlib import Path
from shark.parser import shark_args
input_type_to_np_dtype = {
"float32": np.float32,
@@ -32,7 +33,14 @@ input_type_to_np_dtype = {
# Save the model in the home local so it needn't be fetched everytime in the CI.
home = str(Path.home())
WORKDIR = os.path.join(home, ".local/shark_tank/")
alt_path = os.path.join(os.path.dirname(__file__), "../gen_shark_tank/")
if os.path.exists(alt_path):
WORKDIR = alt_path
print(
f"Using {WORKDIR} as shark_tank directory. Delete this directory if you aren't working from locally generated shark_tank."
)
else:
WORKDIR = os.path.join(home, ".local/shark_tank/")
print(WORKDIR)
@@ -110,9 +118,12 @@ def download_torch_model(
np.load(os.path.join(model_dir, "upstream_hash.npy"))
)
if local_hash != upstream_hash:
print(
"Hash does not match upstream in gs://shark_tank/. If you are using SHARK Downloader with locally generated artifacts, this is working as intended."
)
if shark_args.update_tank == True:
gs_download_model()
else:
print(
"Hash does not match upstream in gs://shark_tank/. If you are using SHARK Downloader with locally generated artifacts, this is working as intended."
)
model_dir = os.path.join(WORKDIR, model_dir_name)
with open(
@@ -171,9 +182,12 @@ def download_tflite_model(
np.load(os.path.join(model_dir, "upstream_hash.npy"))
)
if local_hash != upstream_hash:
print(
"Hash does not match upstream in gs://shark_tank/. If you are using SHARK Downloader with locally generated artifacts, this is working as intended."
)
if shark_args.update_tank == True:
gs_download_model()
else:
print(
"Hash does not match upstream in gs://shark_tank/. If you are using SHARK Downloader with locally generated artifacts, this is working as intended."
)
model_dir = os.path.join(WORKDIR, model_dir_name)
with open(
@@ -227,9 +241,12 @@ def download_tf_model(model_name, tuned=None, shark_default_sha="latest"):
np.load(os.path.join(model_dir, "upstream_hash.npy"))
)
if local_hash != upstream_hash:
print(
"Hash does not match upstream in gs://shark_tank/. If you are using SHARK Downloader with locally generated artifacts, this is working as intended."
)
if shark_args.update_tank == True:
gs_download_model()
else:
print(
"Hash does not match upstream in gs://shark_tank/. If you are using SHARK Downloader with locally generated artifacts, this is working as intended."
)
model_dir = os.path.join(WORKDIR, model_dir_name)
suffix = "_tf.mlir" if tuned is None else "_tf_" + tuned + ".mlir"