Change tflite tests from sharkimporter -> sharkdownloader (#182)

* Change tflite test from sharkimporter -> sharkdownloader

* xfail all uint/int tflite sharkdownloader tests
This commit is contained in:
Chi_Liu
2022-07-14 13:40:25 -07:00
committed by GitHub
parent 79caf729f7
commit 8434c67d96
39 changed files with 558 additions and 611 deletions

View File

@@ -23,6 +23,8 @@ input_type_to_np_dtype = {
"bool": np.bool_,
"int32": np.int32,
"int64": np.int64,
"uint8": np.uint8,
"int8": np.int8,
}
@@ -32,7 +34,7 @@ class SharkDownloader:
model_name: str,
tank_url: str = "https://storage.googleapis.com/shark_tank",
local_tank_dir: str = "./../gen_shark_tank/tflite",
model_type: str = "tflite-tosa",
model_type: str = "tflite",
input_json: str = "input.json",
input_type: str = "int32",
):
@@ -84,7 +86,7 @@ class SharkDownloader:
def load_json_input(self):
print("load json inputs")
if self.model_type in ["tflite-tosa"]:
if self.model_type in ["tflite"]:
input_url = (
self.tank_url + "/" + str(self.model_name) + "/" + "input.json"
)
@@ -109,7 +111,7 @@ class SharkDownloader:
return self.inputs
def load_mlir_model(self):
if self.model_type in ["tflite-tosa"]:
if self.model_type in ["tflite"]:
self.mlir_url = (
self.tank_url
+ "/"

View File

@@ -4,30 +4,36 @@ from shark.shark_inference import SharkInference
import pytest
import unittest
from shark.parser import shark_args
import os
import sys
import urllib.request
from PIL import Image
from shark.tflite_utils import TFLitePreprocessor
# model_path = "https://github.com/tensorflow/tflite-micro/raw/aeac6f39e5c7475cea20c54e86d41e3a38312546/tensorflow/lite/micro/models/person_detect.tflite"
# model_path = "https://tfhub.dev/tensorflow/lite-model/albert_lite_base/squadv1/1?lite-format=tflite"
# model_path = model_path
# Inputs modified to be useful albert inputs.
def generate_inputs(input_details):
exe_basename = os.path.basename(sys.argv[0])
workdir = os.path.join(os.path.dirname(__file__), "../tmp", exe_basename)
os.makedirs(workdir, exist_ok=True)
for input in input_details:
print(str(input["shape"]), input["dtype"].__name__)
img_path = "https://github.com/tensorflow/tflite-micro/raw/aeac6f39e5c7475cea20c54e86d41e3a38312546/tensorflow/lite/micro/examples/person_detection/testdata/person.bmp"
local_path = "/".join([workdir, "person.bmp"])
urllib.request.urlretrieve(img_path, local_path)
shape = input_details[0]["shape"]
im = np.array(Image.open(local_path).resize((shape[1], shape[2]))).astype(
input_details[0]["dtype"]
args = []
args.append(
np.random.randint(
low=0,
high=256,
size=input_details[0]["shape"],
dtype=input_details[0]["dtype"],
)
)
args.append(
np.ones(
shape=input_details[1]["shape"], dtype=input_details[1]["dtype"]
)
)
args.append(
np.zeros(
shape=input_details[2]["shape"], dtype=input_details[2]["dtype"]
)
)
args = [im.reshape(shape)]
return args
@@ -41,12 +47,14 @@ def compare_results(mlir_results, tflite_results, details):
tflite_result = tflite_results[i]
mlir_result = mlir_result.astype(np.single)
tflite_result = tflite_result.astype(np.single)
print("mlir_result.shape", mlir_result.shape)
print("tflite_result.shape", tflite_result.shape)
assert mlir_result.shape == tflite_result.shape, "shape doesnot match"
max_error = np.max(np.abs(mlir_result - tflite_result))
print("Max error (%d): %f", i, max_error)
class PersonDetectionTfliteModuleTester:
class AlbertTfliteModuleTester:
def __init__(
self,
dynamic=False,
@@ -64,25 +72,7 @@ class PersonDetectionTfliteModuleTester:
shark_args.save_vmfb = self.save_vmfb
# Preprocess to get SharkImporter input args
# The input has known expected values. We hardcode this value.
input_details = [
{
"shape": [1, 96, 96, 1],
"dtype": np.int8,
"index": 0,
}
]
output_details = [
{
"shape": [1, 2],
"dtype": np.int8,
}
]
tflite_preprocessor = TFLitePreprocessor(
model_name="person_detect",
input_details=input_details,
output_details=output_details,
)
tflite_preprocessor = TFLitePreprocessor(model_name="albert_lite_base")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
@@ -104,8 +94,20 @@ class PersonDetectionTfliteModuleTester:
mlir_dialect="tflite",
)
# Case2: Use manually set inputs
# Case1: Use shark_importer default generate inputs
shark_module.compile()
mlir_results = shark_module.forward(inputs)
## post process results for compare
input_details, output_details = tflite_preprocessor.get_model_details()
mlir_results = list(mlir_results)
for i in range(len(output_details)):
dtype = output_details[i]["dtype"]
mlir_results[i] = mlir_results[i].astype(dtype)
tflite_results = tflite_preprocessor.get_raw_model_output()
compare_results(mlir_results, tflite_results, output_details)
# Case2: Use manually set inputs
input_details, output_details = tflite_preprocessor.get_model_details()
inputs = generate_inputs(input_details) # new inputs
shark_module = SharkInference(
@@ -117,23 +119,26 @@ class PersonDetectionTfliteModuleTester:
shark_module.compile()
mlir_results = shark_module.forward(inputs)
## post process results for compare
# The input has known expected values. We hardcode this value.
tflite_results = [np.array([[-113, 113]], dtype=np.int8)]
tflite_results = tflite_preprocessor.get_raw_model_output()
compare_results(mlir_results, tflite_results, output_details)
# print(mlir_results)
class PersonDetectionTfliteModuleTest(unittest.TestCase):
class AlbertTfliteModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.save_mlir = pytestconfig.getoption("save_mlir")
self.save_vmfb = pytestconfig.getoption("save_vmfb")
def setUp(self):
self.module_tester = PersonDetectionTfliteModuleTester(self)
self.module_tester = AlbertTfliteModuleTester(self)
self.module_tester.save_mlir = self.save_mlir
@pytest.mark.skip(reason="TFLite is broken with this model")
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False
self.module_tester.device = "cpu"
@@ -141,7 +146,7 @@ class PersonDetectionTfliteModuleTest(unittest.TestCase):
if __name__ == "__main__":
# module_tester = PersonDetectionTfliteModuleTester()
# module_tester = AlbertTfliteModuleTester()
# module_tester.save_mlir = True
# module_tester.save_vmfb = True
# module_tester.create_and_check_module()

View File

@@ -1,90 +0,0 @@
import sys
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
from shark.parser import shark_args
class AlbertTfliteModuleTester:
def __init__(
self,
dynamic=False,
device="cpu",
save_mlir=False,
save_vmfb=False,
):
self.dynamic = dynamic
self.device = device
self.save_mlir = save_mlir
self.save_vmfb = save_vmfb
def create_and_check_module(self):
shark_args.save_mlir = self.save_mlir
shark_args.save_vmfb = self.save_vmfb
shark_downloader = SharkDownloader(
model_name="albert_lite_base",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite-tosa",
input_json="input.json",
input_type="int32",
)
tflite_tosa_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
shark_module = SharkInference(
mlir_module=tflite_tosa_model,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
shark_module.compile()
shark_module.forward(inputs)
# print(shark_results)
class AlbertTfliteModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.save_mlir = pytestconfig.getoption("save_mlir")
self.save_vmfb = pytestconfig.getoption("save_vmfb")
def setUp(self):
self.module_tester = AlbertTfliteModuleTester(self)
self.module_tester.save_mlir = self.save_mlir
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False
self.module_tester.device = "cpu"
self.module_tester.create_and_check_module()
if __name__ == "__main__":
unittest.main()
# module_tester = AlbertTfliteModuleTester()
# module_tester.create_and_check_module()
# TEST RESULT:
# (shark.venv) nod% python albert_lite_base_tflite_mlir_test.py
# load json inputs
# TMP_MODEL_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite
# Model has not been downloaded. shark_downloader will automatically download it from tank_url if provided. You can also manually download the model from shark_tank yourself.
# TMP_MODELNAME_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite/albert_lite_base
# Download mlir model https://storage.googleapis.com/shark_tank/tflite/albert_lite_base/albert_lite_base_tosa.mlir
# Get tosa.mlir model return
# Target triple found:x86_64-linux-gnu
# (shark.venv) nod% python albert_lite_base_tflite_mlir_test.py
# load json inputs
# TMP_MODEL_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite
# TMP_MODELNAME_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite/albert_lite_base
# Model has been downloaded before. shark/SHARK/shark/./../gen_shark_tank/tflite/albert_lite_base/albert_lite_base_tosa.mlir
# Get tosa.mlir model return
# Target triple found:x86_64-linux-gnu
#

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -70,58 +70,26 @@ class AlbertTfliteModuleTester:
def create_and_check_module(self):
shark_args.save_mlir = self.save_mlir
shark_args.save_vmfb = self.save_vmfb
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="albert_lite_base")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="albert_lite_base",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
# Use SharkInference to get inference result
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
device=self.device,
mlir_dialect="tflite",
)
# Case1: Use shark_importer default generate inputs
shark_module.compile()
mlir_results = shark_module.forward(inputs)
## post process results for compare
input_details, output_details = tflite_preprocessor.get_model_details()
mlir_results = list(mlir_results)
for i in range(len(output_details)):
dtype = output_details[i]["dtype"]
mlir_results[i] = mlir_results[i].astype(dtype)
tflite_results = tflite_preprocessor.get_raw_model_output()
compare_results(mlir_results, tflite_results, output_details)
# Case2: Use manually set inputs
input_details, output_details = tflite_preprocessor.get_model_details()
inputs = generate_inputs(input_details) # new inputs
tflite_tosa_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
mlir_module=tflite_tosa_model,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
shark_module.compile()
mlir_results = shark_module.forward(inputs)
## post process results for compare
tflite_results = tflite_preprocessor.get_raw_model_output()
compare_results(mlir_results, tflite_results, output_details)
# print(mlir_results)
shark_module.forward(inputs)
# print(shark_results)
class AlbertTfliteModuleTest(unittest.TestCase):
@@ -146,9 +114,24 @@ class AlbertTfliteModuleTest(unittest.TestCase):
if __name__ == "__main__":
unittest.main()
# module_tester = AlbertTfliteModuleTester()
# module_tester.save_mlir = True
# module_tester.save_vmfb = True
# module_tester.create_and_check_module()
unittest.main()
# TEST RESULT:
# (shark.venv) nod% python albert_lite_base_tflite_mlir_test.py
# load json inputs
# TMP_MODEL_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite
# Model has not been downloaded. shark_downloader will automatically download it from tank_url if provided. You can also manually download the model from shark_tank yourself.
# TMP_MODELNAME_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite/albert_lite_base
# Download mlir model https://storage.googleapis.com/shark_tank/tflite/albert_lite_base/albert_lite_base_tosa.mlir
# Get tosa.mlir model return
# Target triple found:x86_64-linux-gnu
# (shark.venv) nod% python albert_lite_base_tflite_mlir_test.py
# load json inputs
# TMP_MODEL_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite
# TMP_MODELNAME_DIR = shark/SHARK/shark/./../gen_shark_tank/tflite/albert_lite_base
# Model has been downloaded before. shark/SHARK/shark/./../gen_shark_tank/tflite/albert_lite_base/albert_lite_base_tosa.mlir
# Get tosa.mlir model return
# Target triple found:x86_64-linux-gnu
#

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -48,22 +48,22 @@ class ArbitraryImageStylizationV1TfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="arbitrary-image-stylization-v1-256"
)
# inputs = tflite_preprocessor.get_inputs()
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="arbitrary-image-stylization-v1-256",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -96,7 +96,8 @@ class ArbitraryImageStylizationV1TfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -65,22 +65,22 @@ class BirdsV1TfliteModuleTester:
shark_args.save_vmfb = self.save_vmfb
tflite_preprocessor = TFLitePreprocessor(model_name="birds_V1")
# inputs = tflite_preprocessor.get_inputs()
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="birds_V1",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -102,7 +102,7 @@ class BirdsV1TfliteModuleTester:
inputs = generate_inputs(input_details) # device_inputs
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -126,7 +126,8 @@ class BirdsV1TfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -46,21 +46,27 @@ class CartoonganTfliteModuleTester:
shark_args.save_vmfb = self.save_vmfb
tflite_preprocessor = TFLitePreprocessor(model_name="cartoongan")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
# input_details, output_details = tflite_preprocessor.get_model_details()
# print(input_details[0]["dtype"])
# import pdb
# pdb.set_trace()
shark_downloader = SharkDownloader(
model_name="cartoongan",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)

View File

@@ -1,13 +1,9 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
from shark.parser import shark_args
import os
import sys
import urllib.request
from PIL import Image
from shark.tflite_utils import TFLitePreprocessor
@@ -51,23 +47,23 @@ class DeepLabV3TfliteModuleTester:
# preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="deeplabv3")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="deeplabv3",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
# Use SharkInference to get inference result
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -47,23 +47,22 @@ class DensenetTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="densenet")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="densenet",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
# Use SharkInference to get inference result
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -63,18 +63,19 @@ class Efficientnet_224_fp32TfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="efficientnet_224_fp32"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="efficientnet_224_fp32",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -63,23 +63,22 @@ class Efficientnet_lite0_fp32_2TfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="efficientnet_lite0_fp32_2"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="efficientnet_lite0_fp32_2",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
# Use SharkInference to get inference result
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -102,7 +101,7 @@ class Efficientnet_lite0_fp32_2TfliteModuleTester:
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -127,7 +126,8 @@ class Efficientnet_lite0_fp32_2TfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -61,23 +61,22 @@ class Efficientnet_lite0_int8_2TfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="efficientnet_lite0_int8_2"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="efficientnet_lite0_int8_2",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
# Use SharkInference to get inference result
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -100,7 +99,7 @@ class Efficientnet_lite0_int8_2TfliteModuleTester:
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -125,7 +124,8 @@ class Efficientnet_lite0_int8_2TfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -60,23 +60,22 @@ class GptTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="gpt2-64")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="gpt2-64",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
# Use SharkInference to get inference result
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)
@@ -99,7 +98,7 @@ class GptTfliteModuleTester:
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,
function_name="main",
device=self.device,
mlir_dialect="tflite",
)

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -63,20 +63,20 @@ class Inception_v4_299_fp32TfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="inception_v4_299_fp32"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="inception_v4_299_fp32",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
mlir_module=mlir_model,
function_name=func_name,

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -60,18 +60,22 @@ class Inception_v4_299_uint8TfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="inception_v4_299_uint8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
tflite_preprocessor = TFLitePreprocessor(
model_name="inception_v4_299_uint8"
)
mlir_model, func_name = my_shark_importer.import_mlir()
# inputs = tflite_preprocessor.get_inputs()
shark_downloader = SharkDownloader(
model_name="inception_v4_299_uint8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -124,7 +128,8 @@ class Inception_v4_299_uint8TfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -47,18 +47,19 @@ class MidasTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="midas")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="midas",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -47,18 +47,19 @@ class MnasnetTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="mnasnet_1.0_224")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mnasnet_1.0_224",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -67,18 +67,19 @@ class MobilebertTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilebert-baseline-tf2-float"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilebert-baseline-tf2-float",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -67,18 +67,22 @@ class MobilebertTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilebert-baseline-tf2-quant"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilebert-baseline-tf2-quant"
)
mlir_model, func_name = my_shark_importer.import_mlir()
# inputs = tflite_preprocessor.get_inputs()
shark_downloader = SharkDownloader(
model_name="mobilebert-baseline-tf2-quant",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -131,7 +135,8 @@ class MobilebertTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -74,18 +74,19 @@ class MobilebertTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilebert-edgetpu-s-float"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilebert-edgetpu-s-float",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -74,18 +74,19 @@ class MobilebertTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilebert-edgetpu-s-quant"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilebert-edgetpu-s-quant",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -121,7 +122,8 @@ class MobilebertTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -64,18 +64,19 @@ class MobilebertTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="mobilebert")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilebert",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -62,18 +62,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v1_224_1.0_float"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v1_224_1.0_float",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -60,18 +60,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v1_224_1.0_uint8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v1_224_1.0_uint8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -124,7 +125,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -62,18 +62,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v2_1.00_224_int8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v2_1.00_224_int8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -126,7 +127,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -63,18 +63,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v2_1.0_224"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v2_1.0_224",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -60,18 +60,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v2_224_1.0_uint8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v2_224_1.0_uint8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="int32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -124,7 +125,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -63,18 +63,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v3-large_224_1.0_float"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v3-large_224_1.0_float",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -60,18 +60,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v3-large_224_1.0_uint8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v3-large_224_1.0_uint8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -124,7 +125,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -63,18 +63,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="mobilenet_v3.5multiavg_1.00_224_int8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="mobilenet_v3.5multiavg_1.00_224_int8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -127,7 +128,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -46,18 +46,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="multi_person_mobilenet_v1_075_float"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="multi_person_mobilenet_v1_075_float",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -47,18 +47,19 @@ class NasnetTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="nasnet")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="nasnet",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -49,18 +49,19 @@ class ResnetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="resnet_50_224_int8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="resnet_50_224_int8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -96,7 +97,8 @@ class ResnetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -47,18 +47,19 @@ class SequeezeNetTfliteModuleTester:
# Preprocess to get SharkImporter input args
tflite_preprocessor = TFLitePreprocessor(model_name="squeezenet")
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="squeezenet",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -94,7 +95,8 @@ class SequeezeNetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -60,18 +60,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="ssd_mobilenet_v1_320_1.0_float"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="ssd_mobilenet_v1_320_1.0_float",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -57,18 +57,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="ssd_mobilenet_v1_320_1.0_uint8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="ssd_mobilenet_v1_320_1.0_uint8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -121,7 +122,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -69,18 +69,19 @@ class MobilenetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="ssd_mobilenet_v2_face_quant"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="ssd_mobilenet_v2_face_quant",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -133,7 +134,8 @@ class MobilenetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.pad' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -59,18 +59,19 @@ class SpaghettinetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="ssd_spaghettinet_edgetpu_large"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="ssd_spaghettinet_edgetpu_large",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="float32",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(

View File

@@ -1,5 +1,5 @@
import numpy as np
from shark.shark_importer import SharkImporter
from shark.shark_downloader import SharkDownloader
from shark.shark_inference import SharkInference
import pytest
import unittest
@@ -57,18 +57,19 @@ class SpaghettinetTfliteModuleTester:
tflite_preprocessor = TFLitePreprocessor(
model_name="ssd_spaghettinet_edgetpu_large_uint8"
)
raw_model_file_path = tflite_preprocessor.get_raw_model_file()
inputs = tflite_preprocessor.get_inputs()
tflite_interpreter = tflite_preprocessor.get_interpreter()
# inputs = tflite_preprocessor.get_inputs()
# Use SharkImporter to get SharkInference input args
my_shark_importer = SharkImporter(
module=tflite_interpreter,
inputs=inputs,
frontend="tflite",
raw_model_file=raw_model_file_path,
shark_downloader = SharkDownloader(
model_name="ssd_spaghettinet_edgetpu_large_uint8",
tank_url="https://storage.googleapis.com/shark_tank",
local_tank_dir="./../gen_shark_tank",
model_type="tflite",
input_json="input.json",
input_type="uint8",
)
mlir_model, func_name = my_shark_importer.import_mlir()
mlir_model = shark_downloader.get_mlir_file()
inputs = shark_downloader.get_inputs()
func_name = "main"
# Use SharkInference to get inference result
shark_module = SharkInference(
@@ -121,7 +122,8 @@ class SpaghettinetTfliteModuleTest(unittest.TestCase):
import sys
@pytest.mark.xfail(
sys.platform == "darwin", reason="known macos tflite install issue"
reason="known macos tflite install issue & "
"'tosa.conv2d' op attribute 'quantization_info' failed "
)
def test_module_static_cpu(self):
self.module_tester.dynamic = False