Add workflow for GH pages releases and release scraping script. (#394)

* Add workflow for GH pages releases and release scraping script.

* Update test_models.py and change tokens for gh pages.
Author: Ean Garvey
Date: 2022-10-11 22:03:33 -05:00 (committed by GitHub)
Parent: 53df0620e3
Commit: f508c80c23
7 changed files with 92 additions and 10 deletions

.github/workflows/gh-pages-releases.yml (new file)

@@ -0,0 +1,37 @@
# See: https://github.com/llvm/torch-mlir/issues/1374
name: Publish releases page
on:
  workflow_dispatch:
jobs:
  scrape_and_publish_releases:
    name: "Scrape and publish releases"
    runs-on: ubuntu-latest
    # Don't run this in everyone's forks.
    if: github.repository == 'nod-ai/SHARK'
    steps:
      - name: Checking out repository
        uses: actions/checkout@v2
        with:
          token: ${{ secrets.NODAI_INVOCATION_TOKEN }}
      - name: Run scrape releases script
        run: python ./build_tools/scrape_releases.py nod-ai SHARK > /tmp/index.html
        shell: bash
      - run: git fetch --all
      - run: git switch github-pages
      - run: git config --global user.email "none@none.com"
      - run: git config --global user.name "nod-team"
      - run: mv /tmp/index.html package-index/index.html
      - run: git add package-index/index.html
      # Only try to make a commit if the file has changed.
      - run: git diff --cached --exit-code || git commit -m "Update releases."
      - name: GitHub Push
        uses: ad-m/github-push-action@v0.6.0
        with:
          github_token: ${{ secrets.NODAI_INVOCATION_TOKEN }}
          branch: github-pages
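
The workflow regenerates package-index/index.html on the github-pages branch and only commits when the scraped page actually changes. A hypothetical local preview, assuming it is run from the repository root, could reproduce the scrape step before the workflow is dispatched; only the script path and arguments come from the workflow itself, the preview server is purely illustrative:

```python
# Hypothetical local preview (not part of the workflow): regenerate the index
# the same way the "Run scrape releases script" step does, then serve it for a
# quick manual check before the workflow pushes to github-pages.
import subprocess
import sys
from pathlib import Path

out_dir = Path("package-index")
out_dir.mkdir(exist_ok=True)

# Same script and arguments as the workflow step above.
html = subprocess.run(
    [sys.executable, "build_tools/scrape_releases.py", "nod-ai", "SHARK"],
    check=True,
    capture_output=True,
    text=True,
).stdout
(out_dir / "index.html").write_text(html)

# Browse http://localhost:8000/package-index/ to inspect the generated page.
subprocess.run([sys.executable, "-m", "http.server", "8000"], check=True)
```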

Release workflow:

@@ -39,6 +39,10 @@ jobs:
          tag_name="${package_version}"
          echo "package_version=${package_version}" >> $GITHUB_ENV
          echo "tag_name=${tag_name}" >> $GITHUB_ENV
+     - name: Set Environment Variables
+       run: |
+         echo "SHORT_SHA=`git rev-parse --short=4 HEAD`" >> $GITHUB_ENV
+         echo "DATE=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
      - name: Create Release
        id: create_release
        uses: actions/create-release@v1
@@ -81,7 +85,7 @@
          pip install ./wheelhouse/nodai*
          # Validate the Models
          /bin/bash "$GITHUB_WORKSPACE/build_tools/populate_sharktank_ci.sh"
-         pytest tank/test_models.py |
+         pytest --ci --ci_sha=${SHORT_SHA} --local_tank_cache="./gen_shark_tank/" tank/test_models.py |
          tail -n 1 |
          tee -a pytest_results.txt
          if !(grep -Fxq " failed" pytest_results.txt)
@@ -104,13 +108,10 @@
          # Install the built wheel
          pip install ./wheelhouse/nodai*
          # Validate the Models
-         pytest tank/test_models.py |
+         pytest --ci --ci_sha=${SHORT_SHA} --local_tank_cache="./gen_shark_tank/" tank/test_models.py |
          tail -n 1 |
          tee -a pytest_results.txt
  publish:
    runs-on: a100
    needs: build
    steps:
      - name: Upload Release Assets
        if: ${{ matrix.backend == 'SHARK' }}
        id: upload-release-assets
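
The new pytest flags (--ci, --ci_sha, --local_tank_cache) are custom options supplied by the SHARK test suite rather than built-in pytest arguments. As a rough illustration only, and not the repository's actual conftest.py, options like these are normally registered through pytest's pytest_addoption hook:

```python
# Hypothetical conftest.py sketch -- names mirror the flags in the workflow;
# defaults and the helper fixture are assumptions for illustration.
import pytest


def pytest_addoption(parser):
    # Register the flags so `pytest --ci --ci_sha=<sha> ...` parses.
    parser.addoption("--ci", action="store_true", default=False,
                     help="Run in CI mode (assumed flag).")
    parser.addoption("--ci_sha", action="store", default="latest",
                     help="Short commit SHA used to tag CI artifacts (assumed).")
    parser.addoption("--local_tank_cache", action="store", default="",
                     help="Path to a locally generated shark_tank cache (assumed).")


@pytest.fixture
def ci_options(request):
    # Convenience fixture exposing the parsed options to tests.
    return {
        "ci": request.config.getoption("--ci"),
        "ci_sha": request.config.getoption("--ci_sha"),
        "local_tank_cache": request.config.getoption("--local_tank_cache"),
    }
```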

README (installation instructions):

@@ -38,7 +38,7 @@ python -m pip install --upgrade pip
This step pip installs SHARK and related packages on Linux Python 3.7, 3.8, 3.9, 3.10 and macOS Python 3.10
```shell
-pip install nodai-shark -f https://github.com/nod-ai/SHARK/releases -f https://github.com/llvm/torch-mlir/releases -f https://github.com/nod-ai/shark-runtime/releases --extra-index-url https://download.pytorch.org/whl/nightly/cpu
+pip install nodai-shark -f https://nod-ai.github.io/SHARK/package-index/ -f https://llvm.github.io/torch-mlir/package-index/ -f https://github.com/nod-ai/shark-runtime/releases --extra-index-url https://download.pytorch.org/whl/nightly/cpu
```
If you are on an Intel macOS machine you need this [workaround](https://github.com/nod-ai/SHARK/issues/102) for an upstream issue.
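
The updated -f URLs point at plain HTML pages whose anchor tags list the release wheels, which is the format pip's --find-links option understands. A small, hypothetical check of what pip would see on the nod-ai index (URL taken from the install command above):

```python
# Hypothetical check (not part of this commit): list the wheel links that
# pip's --find-links would discover on the published package index.
from html.parser import HTMLParser

import requests

INDEX_URL = "https://nod-ai.github.io/SHARK/package-index/"


class LinkCollector(HTMLParser):
    """Collects href targets from <a> tags, which is all pip looks for here."""

    def __init__(self):
        super().__init__()
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            self.links.extend(value for name, value in attrs if name == "href")


page = requests.get(INDEX_URL, timeout=30)
page.raise_for_status()
collector = LinkCollector()
collector.feed(page.text)
for href in collector.links:
    if href.endswith(".whl"):
        print(href)
```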

build_tools/scrape_releases.py (new file)

@@ -0,0 +1,37 @@
"""Scrapes the github releases API to generate a static pip-install-able releases page.
See https://github.com/llvm/torch-mlir/issues/1374
"""
import argparse
import json
import requests
# Parse arguments
parser = argparse.ArgumentParser()
parser.add_argument("owner", type=str)
parser.add_argument("repo", type=str)
args = parser.parse_args()
# Get releases
response = requests.get(
f"https://api.github.com/repos/{args.owner}/{args.repo}/releases"
)
body = json.loads(response.content)
# Parse releases
releases = []
for row in body:
for asset in row["assets"]:
releases.append((asset["name"], asset["browser_download_url"]))
# Output HTML
html = """<!DOCTYPE html>
<html>
<body>
"""
for name, url in releases:
html += f" <a href='{url}'>{name}</a><br />\n"
html += """ </body>
</html>"""
print(html)
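
One caveat: the GitHub releases endpoint is paginated (30 results per page by default, at most 100 with per_page), so the script above only sees the most recent page of releases. A possible extension, not part of this commit, would walk every page:

```python
# Possible extension (not in this commit): walk the paginated releases API so
# repositories with more than one page of releases are fully listed.
import requests


def fetch_all_releases(owner: str, repo: str, per_page: int = 100):
    """Yield every release object from the paginated GitHub API."""
    page = 1
    while True:
        response = requests.get(
            f"https://api.github.com/repos/{owner}/{repo}/releases",
            params={"per_page": per_page, "page": page},
            timeout=30,
        )
        response.raise_for_status()
        batch = response.json()
        if not batch:
            break
        yield from batch
        page += 1


# Example: collect (name, url) pairs across all pages.
releases = [
    (asset["name"], asset["browser_download_url"])
    for release in fetch_all_releases("nod-ai", "SHARK")
    for asset in release["assets"]
]
```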

tank/test_models.py

@@ -311,6 +311,7 @@ class SharkModuleTest(unittest.TestCase):
reason="https://github.com/nod-ai/SHARK/issues/311, https://github.com/nod-ai/SHARK/issues/342"
)
if config["model_name"] == "funnel-transformer/small" and device in [
"cpu",
"cuda",
"metal",
"vulkan",
@@ -348,11 +349,17 @@ class SharkModuleTest(unittest.TestCase):
             and device == "cuda"
         ):
             pytest.xfail(reason="https://github.com/nod-ai/SHARK/issues/390")
-        if config["model_name"] == "squeezenet1_0" and device == "vulkan":
+        if config["model_name"] == "squeezenet1_0" and device in [
+            "metal",
+            "vulkan",
+        ]:
             pytest.xfail(
                 reason="Numerics Issues: https://github.com/nod-ai/SHARK/issues/388"
             )
-        if config["model_name"] == "mobilenet_v3_small" and device == "vulkan":
+        if config["model_name"] == "mobilenet_v3_small" and device in [
+            "metal",
+            "vulkan",
+        ]:
             pytest.xfail(
                 reason="Numerics Issues: https://github.com/nod-ai/SHARK/issues/388"
             )

New model list CSV:

@@ -0,0 +1 @@
hf-internal-testing/tiny-random-flaubert,hf

Model list CSV:

@@ -5,7 +5,6 @@ camembert-base,hf
dbmdz/convbert-base-turkish-cased,hf
distilbert-base-uncased,hf
google/electra-small-discriminator,hf
-hf-internal-testing/tiny-random-flaubert,hf
funnel-transformer/small,hf
microsoft/layoutlm-base-uncased,hf
google/mobilebert-uncased,hf