Revert "add/remove pytest marks to TF tests"

This reverts commit b522f933a9.
This commit is contained in:
Ean Garvey
2022-06-20 15:36:20 +00:00
parent b522f933a9
commit f1edf88c33
18 changed files with 191 additions and 784 deletions

View File

@@ -1,3 +0,0 @@
def pytest_addoption(parser):
# Attaches SHARK command-line arguments to the pytest machinery.
parser.addoption("--save_temps", action="store_true", default="False", help="Saves IREE reproduction artifacts for filing upstream issues.")

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class AlbertBaseModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("albert-base-v2")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"albert_base_v2_dynamic_{device}"
else:
repro_dir = f"albert_base_v2_static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class AlbertBaseModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = AlbertBaseModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = AlbertBaseModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class BertBaseUncasedModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("bert-base-uncased")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"bert_base_uncased_dynamic_{device}"
else:
repro_dir = f"bert_base_uncased_static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class BertBaseUncasedModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = BertBaseUncasedModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = BertBaseUncasedModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class CamemBertModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("camembert-base")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"camembert-base_dynamic_{device}"
else:
repro_dir = f"camembert-base_static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class CamemBertModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester=CamemBertModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = CamemBertModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,66 +3,31 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class ConvBertModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model(
"dbmdz/convbert-base-turkish-cased")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"convbert_base_dynamic_{device}"
else:
repro_dir = f"convbert_base_static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class ConvBertModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = ConvBertModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = ConvBertModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,63 +3,28 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class DebertaModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("microsoft/deberta-base")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"deberta-base_dynamic_{device}"
else:
repro_dir = f"deberta-base_static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class DebertaModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = DebertaModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = DebertaModuleTester()
@pytest.mark.xfail
@pytest.mark.skip(reason="deberta currently failing in the lowering passes."

View File

@@ -3,64 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class DistilBertModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("distilbert-base-uncased")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"distilbert_dynamic_{device}"
else:
repro_dir = f"distilbert__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class DistilBertModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = DistilBertModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = DistilBertModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,65 +3,31 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class ElectraModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("google/electra-small-discriminator")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"electra_dynamic_{device}"
else:
repro_dir = f"electra__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
model, input, act_out = get_causal_lm_model(
"google/electra-small-discriminator")
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class ElectraModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self):
self.module_tester = ElectraModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = ElectraModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,63 +3,28 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class FunnelModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("funnel-transformer/small")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"funnel_dynamic_{device}"
else:
repro_dir = f"funnel__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class FunnelModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = FunnelModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = FunnelModuleTester()
@pytest.mark.skip(reason="funnel currently failing in the lowering passes.")
def test_module_static_cpu(self):

View File

@@ -3,66 +3,31 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class LayoutLmModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model(
"microsoft/layoutlm-base-uncased")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"layoutlm_dynamic_{device}"
else:
repro_dir = f"layoutlm__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class LayoutLmModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = LayoutLmModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = LayoutLmModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,63 +3,29 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class LongFormerModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("allenai/longformer-base-4096")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"longformer_dynamic_{device}"
else:
repro_dir = f"longformer__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
model, input, act_out = get_causal_lm_model(
"allenai/longformer-base-4096")
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class LongFormerModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = LongFormerModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = LongFormerModuleTester()
@pytest.mark.skip(
reason="longformer currently failing in the lowering passes.")

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class MobileBertModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("google/mobilebert-uncased")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"mobilebert_dynamic_{device}"
else:
repro_dir = f"mobilebert__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class MobileBertModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = MobileBertModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = MobileBertModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class MpNetModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("microsoft/mpnet-base")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"mpnet_dynamic_{device}"
else:
repro_dir = f"mpnet__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class MpNetModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = MpNetModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
def setUp(self):
self.module_tester = MpNetModuleTester()
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,63 +3,28 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class RemBertModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("google/rembert")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"rembert_dynamic_{device}"
else:
repro_dir = f"rembert__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class RemBertModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = RemBertModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = RemBertModuleTester()
@pytest.mark.skip(reason="rembert currently failing in the lowering passes."
)

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class RobertaModuleTester:
def __init__(
self,
save_temps=False
):
self.save_temps = save_temps
def create_and_check_module(self, dynamic, device):
model, input, act_out = get_causal_lm_model("roberta-base")
save_temps = self.save_temps
if save_temps == True:
if dynamic == True:
repro_dir = f"roberta_dynamic_{device}"
else:
repro_dir = f"roberta__static_{device}"
temp_dir = tempfile.mkdtemp(prefix=repro_dir)
np.set_printoptions(threshold=np.inf)
np.save(f"{temp_dir}/input1.npy", input[0])
np.save(f"{temp_dir}/input2.npy", input[1])
exp_out = act_out.numpy()
with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
out_file.write(np.array2string(exp_out))
with ireec.tools.TempFileSaver(temp_dir):
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
else:
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
shark_module = SharkInference(model, (input,),
device=device,
dynamic=dynamic,
jit_trace=True)
shark_module.set_frontend("tensorflow")
shark_module.compile()
results = shark_module.forward((input))
assert True == compare_tensors_tf(act_out, results)
class RobertaModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = RobertaModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = RobertaModuleTester()
@pytest.mark.xfail(reason="Upstream IREE issue, see https://github.com/google/iree/issues/9536")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,63 +3,28 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class TapasBaseModuleTester:
    """Compiles and runs google/tapas-base through SHARK/IREE and checks the
    results against the reference TensorFlow outputs.

    When ``save_temps`` is truthy, IREE reproduction artifacts (serialized
    inputs, the expected output, and compiler temporaries) are written to a
    fresh temp directory so failures can be filed upstream.
    """

    def __init__(self, save_temps=False):
        # NOTE(review): the pytest --save_temps option defaults to the string
        # "False" (see conftest); any truthy value enables artifact saving.
        self.save_temps = save_temps

    def _run_and_check(self, model, inputs, act_out, dynamic, device):
        """Build, compile, and run the module; assert output parity with TF."""
        shark_module = SharkInference(
            model, (inputs,), device=device, dynamic=dynamic, jit_trace=True
        )
        shark_module.set_frontend("tensorflow")
        shark_module.compile()
        results = shark_module.forward(inputs)
        assert compare_tensors_tf(act_out, results)

    def create_and_check_module(self, dynamic, device):
        """Run the tapas-base causal-LM model and verify its outputs.

        dynamic: compile with dynamic shapes when True.
        device:  SHARK device string (e.g. "cpu", "gpu", "vulkan").
        """
        model, inputs, act_out = get_causal_lm_model("google/tapas-base")
        if self.save_temps:
            # Fixed typo: the static prefix previously read "tapas-base__static".
            mode = "dynamic" if dynamic else "static"
            temp_dir = tempfile.mkdtemp(prefix=f"tapas-base_{mode}_{device}")
            # Full-precision dump so the expected output is reproducible.
            np.set_printoptions(threshold=np.inf)
            np.save(f"{temp_dir}/input1.npy", inputs[0])
            np.save(f"{temp_dir}/input2.npy", inputs[1])
            exp_out = act_out.numpy()
            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
                out_file.write(np.array2string(exp_out))
            # Capture IREE compiler temporaries alongside the inputs/outputs.
            with ireec.tools.TempFileSaver(temp_dir):
                self._run_and_check(model, inputs, act_out, dynamic, device)
        else:
            self._run_and_check(model, inputs, act_out, dynamic, device)
class TapasBaseModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = TapasBaseModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = TapasBaseModuleTester()
@pytest.mark.skip(reason="tapas currently failing in the lowering passes.")
def test_module_static_cpu(self):

View File

@@ -3,64 +3,31 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class FlauBertModuleTester:
    """Compiles and runs hf-internal-testing/tiny-random-flaubert through
    SHARK/IREE and checks the results against the reference TensorFlow
    outputs.

    When ``save_temps`` is truthy, IREE reproduction artifacts (serialized
    inputs, the expected output, and compiler temporaries) are written to a
    fresh temp directory so failures can be filed upstream.
    """

    def __init__(self, save_temps=False):
        # NOTE(review): the pytest --save_temps option defaults to the string
        # "False" (see conftest); any truthy value enables artifact saving.
        self.save_temps = save_temps

    def _run_and_check(self, model, inputs, act_out, dynamic, device):
        """Build, compile, and run the module; assert output parity with TF."""
        shark_module = SharkInference(
            model, (inputs,), device=device, dynamic=dynamic, jit_trace=True
        )
        shark_module.set_frontend("tensorflow")
        shark_module.compile()
        results = shark_module.forward(inputs)
        assert compare_tensors_tf(act_out, results)

    def create_and_check_module(self, dynamic, device):
        """Run the tiny-random-flaubert causal-LM model and verify outputs.

        dynamic: compile with dynamic shapes when True.
        device:  SHARK device string (e.g. "cpu", "gpu", "vulkan").
        """
        model, inputs, act_out = get_causal_lm_model(
            "hf-internal-testing/tiny-random-flaubert"
        )
        if self.save_temps:
            # Fixed typo: the static prefix previously read "flaubert__static".
            mode = "dynamic" if dynamic else "static"
            temp_dir = tempfile.mkdtemp(prefix=f"flaubert_{mode}_{device}")
            # Full-precision dump so the expected output is reproducible.
            np.set_printoptions(threshold=np.inf)
            np.save(f"{temp_dir}/input1.npy", inputs[0])
            np.save(f"{temp_dir}/input2.npy", inputs[1])
            exp_out = act_out.numpy()
            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
                out_file.write(np.array2string(exp_out))
            # Capture IREE compiler temporaries alongside the inputs/outputs.
            with ireec.tools.TempFileSaver(temp_dir):
                self._run_and_check(model, inputs, act_out, dynamic, device)
        else:
            self._run_and_check(model, inputs, act_out, dynamic, device)
class FlauBertModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = FlauBertModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = FlauBertModuleTester()
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"

View File

@@ -3,65 +3,30 @@ from tank.model_utils_tf import compare_tensors_tf
from shark.iree_utils import check_device_drivers
from shark.shark_inference import SharkInference
import iree.compiler as ireec
import unittest
import pytest
import numpy as np
import tempfile
class XLMRobertaModuleTester:
    """Compiles and runs xlm-roberta-base through SHARK/IREE and checks the
    results against the reference TensorFlow outputs.

    When ``save_temps`` is truthy, IREE reproduction artifacts (serialized
    inputs, the expected output, and compiler temporaries) are written to a
    fresh temp directory so failures can be filed upstream.
    """

    def __init__(self, save_temps=False):
        # NOTE(review): the pytest --save_temps option defaults to the string
        # "False" (see conftest); any truthy value enables artifact saving.
        self.save_temps = save_temps

    def _run_and_check(self, model, inputs, act_out, dynamic, device):
        """Build, compile, and run the module; assert output parity with TF."""
        shark_module = SharkInference(
            model, (inputs,), device=device, dynamic=dynamic, jit_trace=True
        )
        shark_module.set_frontend("tensorflow")
        shark_module.compile()
        results = shark_module.forward(inputs)
        assert compare_tensors_tf(act_out, results)

    def create_and_check_module(self, dynamic, device):
        """Run the xlm-roberta-base causal-LM model and verify its outputs.

        dynamic: compile with dynamic shapes when True.
        device:  SHARK device string (e.g. "cpu", "gpu", "vulkan").
        """
        model, inputs, act_out = get_causal_lm_model("xlm-roberta-base")
        if self.save_temps:
            mode = "dynamic" if dynamic else "static"
            temp_dir = tempfile.mkdtemp(prefix=f"xlm_roberta_{mode}_{device}")
            # Full-precision dump so the expected output is reproducible.
            np.set_printoptions(threshold=np.inf)
            np.save(f"{temp_dir}/input1.npy", inputs[0])
            np.save(f"{temp_dir}/input2.npy", inputs[1])
            exp_out = act_out.numpy()
            with open(f"{temp_dir}/expected_out.txt", "w") as out_file:
                out_file.write(np.array2string(exp_out))
            # Capture IREE compiler temporaries alongside the inputs/outputs.
            with ireec.tools.TempFileSaver(temp_dir):
                self._run_and_check(model, inputs, act_out, dynamic, device)
        else:
            self._run_and_check(model, inputs, act_out, dynamic, device)
class XLMRobertaModuleTest(unittest.TestCase):
@pytest.fixture(autouse=True)
def configure(self, pytestconfig):
self.module_tester = XLMRobertaModuleTester(self)
self.module_tester.save_temps = pytestconfig.getoption("save_temps")
def setUp(self):
self.module_tester = XLMRobertaModuleTester()
@pytest.mark.skip(reason="Test currently hangs.")
@pytest.mark.xfail
def test_module_static_cpu(self):
dynamic = False
device = "cpu"