feat(benchmarks): add constant indexing benchmarks

Author: Umut
Date: 2021-11-04 17:54:03 +03:00
parent b716ddcf76
commit d4909a729f
9 changed files with 432 additions and 0 deletions
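
All nine scripts below share the same harness; only the indexed expression ("Unit Target") and the tensor shape differ. Each target behaves exactly as in plain NumPy. A quick sanity check of the nine indexing patterns (plain NumPy, no FHE; the sample values are illustrative):

import numpy as np

x2d = np.arange(6).reshape(3, 2)  # [[0, 1], [2, 3], [4, 5]]; used by x[0, 0] and x[:, 1]
x1d = np.array([3, 5, 7])         # shape (3,); used by the remaining targets

assert x2d[0, 0] == 0                          # single element of a 2D tensor
assert np.array_equal(x2d[:, 1], [1, 3, 5])    # second column
assert (x1d[0], x1d[1], x1d[-1]) == (3, 5, 7)  # constant element access
assert np.array_equal(x1d[:2], [3, 5])         # leading slice
assert np.array_equal(x1d[:], [3, 5, 7])       # full slice (copy)
assert np.array_equal(x1d[1:], [5, 7])         # trailing slice
assert np.array_equal(x1d[::-1], [7, 5, 3])    # reversal via negative step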


@@ -0,0 +1,48 @@
# bench: Unit Target: x[0, 0]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[0, 0]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3, 2))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3, 2)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3, 2))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        if result_i == label_i:
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()
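
The "# bench:" comments are markers for the benchmark runner rather than executable code: a "Measure: <name> (ms)" line paired with "Measure: End" delimits a timed region, "Measure: Accuracy (%) = ..." defines a derived metric, and "Alert: Accuracy (%) != 100" flags runs whose accuracy is not exactly 100%. A minimal sketch of the timing these markers imply (the explicit time calls and the print format are illustrative assumptions, not the actual runner, which is not part of this commit):

import time

start = time.perf_counter()
work = sum(i * i for i in range(1_000_000))  # stand-in for the measured region (e.g., compilation)
elapsed_ms = (time.perf_counter() - start) * 1000
print(f"Compilation Time (ms): {elapsed_ms:.2f}")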

benchmarks/x-index-0.py Normal file

@@ -0,0 +1,48 @@
# bench: Unit Target: x[0]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[0]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        if result_i == label_i:
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()

benchmarks/x-index-1.py Normal file

@@ -0,0 +1,48 @@
# bench: Unit Target: x[1]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[1]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        if result_i == label_i:
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()


@@ -0,0 +1,48 @@
# bench: Unit Target: x[:2]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[:2]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        # tensor output: an element-wise == inside an if would be ambiguous,
        # so compare the whole arrays
        if np.array_equal(result_i, label_i):
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()
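
Note on the correctness check: unlike the single-element targets, x[:2] (and the slice targets that follow) returns a tensor, and NumPy raises "the truth value of an array ... is ambiguous" when an element-wise == result is used in an if. The slice benchmarks therefore compare whole arrays with np.array_equal.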


@@ -0,0 +1,48 @@
# bench: Unit Target: x[:, 1]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[:, 1]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3, 2))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3, 2)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3, 2))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        # tensor output: compare whole arrays
        if np.array_equal(result_i, label_i):
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()


@@ -0,0 +1,48 @@
# bench: Unit Target: x[:]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[:]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        # tensor output: compare whole arrays
        if np.array_equal(result_i, label_i):
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()


@@ -0,0 +1,48 @@
# bench: Unit Target: x[-1]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[-1]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        if result_i == label_i:
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()


@@ -0,0 +1,48 @@
# bench: Unit Target: x[1:]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[1:]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        # tensor output: compare whole arrays
        if np.array_equal(result_i, label_i):
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()

benchmarks/x-reversed.py Normal file

@@ -0,0 +1,48 @@
# bench: Unit Target: x[::-1]

import numpy as np

from common import BENCHMARK_CONFIGURATION

import concrete.numpy as hnp


def main():
    def function_to_compile(x):
        return x[::-1]

    x = hnp.EncryptedTensor(hnp.UnsignedInteger(3), shape=(3,))

    inputset = [(np.random.randint(0, 2 ** 3, size=(3,)),) for _ in range(32)]

    # bench: Measure: Compilation Time (ms)
    engine = hnp.compile_numpy_function(
        function_to_compile,
        {"x": x},
        inputset,
        compilation_configuration=BENCHMARK_CONFIGURATION,
    )
    # bench: Measure: End

    inputs = []
    labels = []
    for _ in range(100):
        sample_x = np.random.randint(0, 2 ** 3, size=(3,))

        inputs.append([sample_x])
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
        # bench: Measure: Evaluation Time (ms)
        result_i = engine.run(*input_i)
        # bench: Measure: End

        # tensor output: compare whole arrays
        if np.array_equal(result_i, label_i):
            correct += 1

    # bench: Measure: Accuracy (%) = (correct / len(inputs)) * 100
    # bench: Alert: Accuracy (%) != 100


if __name__ == "__main__":
    main()