mirror of
https://github.com/zama-ai/tfhe-rs.git
synced 2026-01-11 07:38:08 -05:00
Compare commits
452 Commits
0.2.1
...
am/refacto
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
58169f9365 | ||
|
|
e14be3b41a | ||
|
|
f1c21888a7 | ||
|
|
2624beb7fa | ||
|
|
e44c38a102 | ||
|
|
4535230874 | ||
|
|
a7b2d9b228 | ||
|
|
ab923a3ebc | ||
|
|
a0e85fb355 | ||
|
|
ecee305340 | ||
|
|
f08ea8cf85 | ||
|
|
096e320b97 | ||
|
|
95aac64c1c | ||
|
|
76aaa56691 | ||
|
|
a40489bdd2 | ||
|
|
4bf617eb10 | ||
|
|
070073d229 | ||
|
|
6c1ca8e32b | ||
|
|
6523610ca4 | ||
|
|
41c20e22f5 | ||
|
|
4a00d25cb1 | ||
|
|
8c9ee64612 | ||
|
|
bfdfbfac0f | ||
|
|
dbe7bdcd5c | ||
|
|
6d77ff18ad | ||
|
|
7d4d0e0b16 | ||
|
|
b27762232c | ||
|
|
f597d0f06f | ||
|
|
ee188448f3 | ||
|
|
ee49f048c7 | ||
|
|
a9b09ecc45 | ||
|
|
efc243edc9 | ||
|
|
bc34411d3f | ||
|
|
c7923ff3ed | ||
|
|
7534b68e5c | ||
|
|
655f7e6214 | ||
|
|
b8556ddbd4 | ||
|
|
cab7439064 | ||
|
|
f8a8780651 | ||
|
|
bb3c8e7d5d | ||
|
|
69536960c3 | ||
|
|
52a7c52a49 | ||
|
|
751c407ba5 | ||
|
|
492d348138 | ||
|
|
e7df7eb5ef | ||
|
|
380ee52986 | ||
|
|
439a28f68b | ||
|
|
2eb1e37ca7 | ||
|
|
eb1b136c45 | ||
|
|
1376bcba7c | ||
|
|
b5b4e54b9b | ||
|
|
23c2bd790a | ||
|
|
251ee9aa0e | ||
|
|
fad066a996 | ||
|
|
6ef1f22b33 | ||
|
|
8cc8dba1ab | ||
|
|
082328c91a | ||
|
|
fdb6faa0a8 | ||
|
|
856440386f | ||
|
|
2e8189514c | ||
|
|
29b2454cce | ||
|
|
9ed2589c7a | ||
|
|
36b71529e6 | ||
|
|
b738946d72 | ||
|
|
62f1425257 | ||
|
|
44e491b93f | ||
|
|
a470b26672 | ||
|
|
015409424c | ||
|
|
37be751188 | ||
|
|
2580a834af | ||
|
|
a029bd878e | ||
|
|
400e7930b6 | ||
|
|
40d07c6bc3 | ||
|
|
9dd2d39f1c | ||
|
|
4045a3bc2f | ||
|
|
b4ffeccd46 | ||
|
|
7fe3ad3b6e | ||
|
|
7fdd4f9532 | ||
|
|
81eef39ddb | ||
|
|
b6459e3cda | ||
|
|
f2ef78c348 | ||
|
|
aef8f31621 | ||
|
|
df78d178da | ||
|
|
9297a886a4 | ||
|
|
28b4f91a32 | ||
|
|
04fb46e41b | ||
|
|
53da809f37 | ||
|
|
723910c669 | ||
|
|
8ecf8879fb | ||
|
|
2427f744f8 | ||
|
|
422e1f23d5 | ||
|
|
30a5ade17f | ||
|
|
6cdd41c22f | ||
|
|
f369bec394 | ||
|
|
df4e9c69c7 | ||
|
|
0e3d129906 | ||
|
|
682e455c94 | ||
|
|
b553a68fa9 | ||
|
|
be95eadf79 | ||
|
|
0213a11a0c | ||
|
|
413fde3b3b | ||
|
|
40f8ac9adf | ||
|
|
2ab25c1084 | ||
|
|
86c62b70e5 | ||
|
|
18d790fc26 | ||
|
|
e9e3dae786 | ||
|
|
9b1dccbcb4 | ||
|
|
cef011dd91 | ||
|
|
19f7d5af5c | ||
|
|
95ca5a80dc | ||
|
|
b5fded34d1 | ||
|
|
0c3b09c83d | ||
|
|
85a19d30a9 | ||
|
|
f58132c391 | ||
|
|
099bff84aa | ||
|
|
42ad474a46 | ||
|
|
9f6827b803 | ||
|
|
d23c0df449 | ||
|
|
229bfeebe4 | ||
|
|
48aab9d494 | ||
|
|
e4769a8212 | ||
|
|
79bdaaba20 | ||
|
|
02a14fff7c | ||
|
|
72cce4c5b2 | ||
|
|
a317c4b9dd | ||
|
|
2e2bd5ba29 | ||
|
|
827d8d8708 | ||
|
|
bf434be347 | ||
|
|
ed83fbb460 | ||
|
|
0aad2e669b | ||
|
|
cd68a3bd1c | ||
|
|
b77286bcbc | ||
|
|
609f83bbff | ||
|
|
2a8ebb81d8 | ||
|
|
1a2a17a6ab | ||
|
|
0080caf95d | ||
|
|
c26238533b | ||
|
|
b29936d844 | ||
|
|
25914cc727 | ||
|
|
ca229e369b | ||
|
|
4a99e54c0d | ||
|
|
2383591351 | ||
|
|
dc464f398d | ||
|
|
ce70b5758a | ||
|
|
1c76a08373 | ||
|
|
9b19bd1e8b | ||
|
|
a3dde21240 | ||
|
|
005e1afe2f | ||
|
|
17c404b77d | ||
|
|
1403971d15 | ||
|
|
94ad69bfa3 | ||
|
|
bc129ba0ed | ||
|
|
462834a12e | ||
|
|
ebeee1d6f8 | ||
|
|
d0e1a582e1 | ||
|
|
546cb369a8 | ||
|
|
445af7ab97 | ||
|
|
23f8c69bae | ||
|
|
b8df207b68 | ||
|
|
03688aee4c | ||
|
|
5a3652f398 | ||
|
|
ae3c261d1e | ||
|
|
95dcf95e88 | ||
|
|
b92d6400f4 | ||
|
|
9ba27b4082 | ||
|
|
b164c90d75 | ||
|
|
ff893ca6ef | ||
|
|
2dd1e13dad | ||
|
|
4393fce861 | ||
|
|
4f10cfa6dd | ||
|
|
a6e4488de2 | ||
|
|
878f3fa448 | ||
|
|
90b887a56f | ||
|
|
7dc52cf4ef | ||
|
|
a69333ed37 | ||
|
|
ffad25449e | ||
|
|
6d471856c7 | ||
|
|
65749cb39b | ||
|
|
bf36316c12 | ||
|
|
d98bb0eb86 | ||
|
|
5747af6dce | ||
|
|
cf08436c7d | ||
|
|
241bddccaf | ||
|
|
8ce1984214 | ||
|
|
82ef430dfa | ||
|
|
2348303b26 | ||
|
|
a35386f740 | ||
|
|
3df542c5f8 | ||
|
|
37623eedf3 | ||
|
|
fa8cf73d57 | ||
|
|
872b20a4a1 | ||
|
|
4920e3b4df | ||
|
|
75a0881e9d | ||
|
|
f67effc359 | ||
|
|
e8eb82f7ae | ||
|
|
53018ddc36 | ||
|
|
9ef62acff1 | ||
|
|
12220b2a18 | ||
|
|
625c150dc1 | ||
|
|
304932a861 | ||
|
|
59181d4717 | ||
|
|
c5d93f4b38 | ||
|
|
7a465cd258 | ||
|
|
a0946ac509 | ||
|
|
5521f2a4a4 | ||
|
|
1a9e40c860 | ||
|
|
bc3e3e46a0 | ||
|
|
60c87b6d95 | ||
|
|
df4c9f511d | ||
|
|
69bfd6556f | ||
|
|
7fad91e429 | ||
|
|
0da30d5e58 | ||
|
|
97ce5b617a | ||
|
|
a4723b03f3 | ||
|
|
d24d484bcf | ||
|
|
9945cdd9b2 | ||
|
|
04a0aa0c31 | ||
|
|
898c305acd | ||
|
|
f5854db0b1 | ||
|
|
6a56af0b07 | ||
|
|
b7d830c57f | ||
|
|
9b983745da | ||
|
|
10b1305e66 | ||
|
|
58b4089524 | ||
|
|
98db328de2 | ||
|
|
f5fab4db99 | ||
|
|
95f2eef94f | ||
|
|
2c10a792a5 | ||
|
|
96689443ef | ||
|
|
514cb9e6af | ||
|
|
c79da46bb2 | ||
|
|
a8449f1ded | ||
|
|
11517703e6 | ||
|
|
35c6aea84b | ||
|
|
4e37f7e5bf | ||
|
|
a69a9c727b | ||
|
|
bafb4f9e17 | ||
|
|
0663d7ca0e | ||
|
|
8112684aae | ||
|
|
f2f4cb7937 | ||
|
|
e0e6aa845a | ||
|
|
1455da273d | ||
|
|
763ad60ff9 | ||
|
|
6f4f923951 | ||
|
|
5de984f7d6 | ||
|
|
640b849d4b | ||
|
|
7d484583ff | ||
|
|
68aa2ba25a | ||
|
|
228f85d843 | ||
|
|
f982c58538 | ||
|
|
e2e901c220 | ||
|
|
507c569eee | ||
|
|
c37d9c590b | ||
|
|
549e9e70da | ||
|
|
d56afcd8c3 | ||
|
|
2019cd1708 | ||
|
|
3cfee104cb | ||
|
|
4b174d552a | ||
|
|
1764c88de0 | ||
|
|
e4af2bad0f | ||
|
|
59ef915095 | ||
|
|
10f034171f | ||
|
|
5e0aff616e | ||
|
|
9687c55eb6 | ||
|
|
222c5e1c19 | ||
|
|
ea47265f15 | ||
|
|
465b79f42d | ||
|
|
2557b29230 | ||
|
|
490bdaea30 | ||
|
|
936ac05e51 | ||
|
|
d496cfa431 | ||
|
|
16be1c1c1d | ||
|
|
f2f4e397f1 | ||
|
|
facc2a162f | ||
|
|
5981a886fd | ||
|
|
e98315fa60 | ||
|
|
6b235f6fef | ||
|
|
4d376eea39 | ||
|
|
d93ddbe897 | ||
|
|
189018ed05 | ||
|
|
fdae4e958c | ||
|
|
d5ef359a04 | ||
|
|
a52cd6454d | ||
|
|
142851792a | ||
|
|
e52bc09db5 | ||
|
|
5bea1e0bc0 | ||
|
|
224d81378a | ||
|
|
011cb48ded | ||
|
|
da05f16c10 | ||
|
|
ffc2472c95 | ||
|
|
c0b82c77fb | ||
|
|
b09dc1f3ca | ||
|
|
a8e8a2e555 | ||
|
|
61819b2cea | ||
|
|
1ee4440c0a | ||
|
|
cbfaf63964 | ||
|
|
6ac96bb46a | ||
|
|
f9b49eeb39 | ||
|
|
fdda5c56f2 | ||
|
|
cb20b4ad3a | ||
|
|
fb653ef9b2 | ||
|
|
2e58fe36a4 | ||
|
|
2cbd8c9fd5 | ||
|
|
11ac8e6cb9 | ||
|
|
5f635e97fa | ||
|
|
7426e441ba | ||
|
|
8ae799c477 | ||
|
|
ee232ed81e | ||
|
|
16f4c721ab | ||
|
|
7ea13715ee | ||
|
|
a924b6ebde | ||
|
|
2b5d39c927 | ||
|
|
aafcbd0a3f | ||
|
|
e810b42eb6 | ||
|
|
352b282149 | ||
|
|
bc5f648c35 | ||
|
|
7f83761fde | ||
|
|
8c3993def2 | ||
|
|
a361ad339d | ||
|
|
aa82d9f19c | ||
|
|
eae2d8137b | ||
|
|
ca127b2878 | ||
|
|
80b5ce7f63 | ||
|
|
2469aa0c2a | ||
|
|
48b307c627 | ||
|
|
8100b2d0de | ||
|
|
aab390470c | ||
|
|
f8f723f42d | ||
|
|
1afdc71689 | ||
|
|
120f6b0304 | ||
|
|
1d817c45d5 | ||
|
|
5690796da4 | ||
|
|
00c4eb417b | ||
|
|
42374db7cb | ||
|
|
f98127498e | ||
|
|
d70729668e | ||
|
|
8d339f2fbf | ||
|
|
c4a73f4f44 | ||
|
|
bd061dc85c | ||
|
|
2defb5a669 | ||
|
|
1b0f3631d4 | ||
|
|
06ddfe893a | ||
|
|
b69c9e7e7a | ||
|
|
18ed2e29a1 | ||
|
|
3a17ebd2fa | ||
|
|
dd15fd1b05 | ||
|
|
097ea6500c | ||
|
|
9e307a8945 | ||
|
|
f8b497a4b8 | ||
|
|
189f02b696 | ||
|
|
5654fe7981 | ||
|
|
2b83a1fec0 | ||
|
|
efac3c842f | ||
|
|
7dbb4485bc | ||
|
|
a5906bb7cb | ||
|
|
90b7494acd | ||
|
|
3508019cd2 | ||
|
|
200c8a177a | ||
|
|
2f6c1cf0b5 | ||
|
|
b96027f417 | ||
|
|
90c850ca0d | ||
|
|
c8d3008a8d | ||
|
|
08c264f193 | ||
|
|
4ae202d8a4 | ||
|
|
7eb8601540 | ||
|
|
8a1691c536 | ||
|
|
d1cb55ba24 | ||
|
|
2b9a49db87 | ||
|
|
62ddb24f00 | ||
|
|
c6ae463b41 | ||
|
|
4947eefad4 | ||
|
|
71209e3927 | ||
|
|
2a66ea3d16 | ||
|
|
d4ff1f5595 | ||
|
|
8ae92a960d | ||
|
|
b042c2f7d6 | ||
|
|
e307da5c7f | ||
|
|
3d5b88d608 | ||
|
|
4fbf0691c5 | ||
|
|
5d277e85b9 | ||
|
|
778eea30e9 | ||
|
|
63247fa227 | ||
|
|
799291a1f0 | ||
|
|
509fe7a63e | ||
|
|
4eac45f0c6 | ||
|
|
ddb3451087 | ||
|
|
e66a329e33 | ||
|
|
d79b1d9b19 | ||
|
|
b501cc078a | ||
|
|
800878d89e | ||
|
|
20d0e81bae | ||
|
|
d3dbf4ecc9 | ||
|
|
c20ca07cd3 | ||
|
|
9f6c7e9139 | ||
|
|
3c8d6a6f8b | ||
|
|
1c837fa6f0 | ||
|
|
1ec7e4762a | ||
|
|
20fb697d57 | ||
|
|
0429d56cf3 | ||
|
|
509bf3e284 | ||
|
|
b2fc1d5266 | ||
|
|
62d94dbee8 | ||
|
|
fbe911d7db | ||
|
|
ba72faf828 | ||
|
|
c387b9340f | ||
|
|
cbb7d30fb8 | ||
|
|
6e4a707eff | ||
|
|
06b700f904 | ||
|
|
cfbabf7480 | ||
|
|
291ed9026f | ||
|
|
610f0010b8 | ||
|
|
8b3d31ae8a | ||
|
|
98539aaa61 | ||
|
|
9a80a01dc3 | ||
|
|
ecf9d50058 | ||
|
|
65e4aab38d | ||
|
|
ac348870ba | ||
|
|
6adfcaa5f7 | ||
|
|
bc6bbe66d9 | ||
|
|
871d4aea17 | ||
|
|
f81376b762 | ||
|
|
64813bae18 | ||
|
|
16ce2a8a3f | ||
|
|
f018987eac | ||
|
|
20f6c5419b | ||
|
|
58b530f40b | ||
|
|
689ad195f3 | ||
|
|
4a1eda25d3 | ||
|
|
af936df064 | ||
|
|
0233a69ea6 | ||
|
|
f72a6ec835 | ||
|
|
25a2586eae | ||
|
|
c112a43a63 | ||
|
|
2813812380 | ||
|
|
84a6036789 | ||
|
|
658368d0b6 | ||
|
|
9368049adf | ||
|
|
5e8ca0b52c | ||
|
|
605cd5b3b0 | ||
|
|
4bfe9c22d4 | ||
|
|
1c0b36c672 | ||
|
|
7dccb01a8d | ||
|
|
7bff348367 | ||
|
|
74a5a278b6 | ||
|
|
426ced3295 | ||
|
|
7af5fcc7eb | ||
|
|
12d9947149 | ||
|
|
7c54896e68 | ||
|
|
04533dedfe | ||
|
|
d01be35557 | ||
|
|
9fc32e2f52 | ||
|
|
e1e78b8b9d |
@@ -5,13 +5,3 @@ failure-output = "final"
|
||||
fail-fast = false
|
||||
retries = 0
|
||||
slow-timeout = "5m"
|
||||
|
||||
|
||||
[[profile.ci.overrides]]
|
||||
filter = 'test(/^.*param_message_1_carry_[567]$/) or test(/^.*param_message_4_carry_4$/)'
|
||||
retries = 3
|
||||
|
||||
[[profile.ci.overrides]]
|
||||
filter = 'test(/^.*param_message_[23]_carry_[23]$/)'
|
||||
retries = 1
|
||||
|
||||
|
||||
13
.github/pull_request_template.md
vendored
Normal file
13
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
<!-- Feel free to delete the template if the PR (bumping a version e.g.) does not fit the template -->
|
||||
closes: _please link all relevant issues_
|
||||
|
||||
### PR content/description
|
||||
|
||||
### Check-list:
|
||||
|
||||
* [ ] Tests for the changes have been added (for bug fixes / features)
|
||||
* [ ] Docs have been added / updated (for bug fixes / features)
|
||||
* [ ] Relevant issues are marked as resolved/closed, related issues are linked in the description
|
||||
* [ ] Check for breaking changes (including serialization changes) and add them to commit message following the conventional commit [specification][conventional-breaking]
|
||||
|
||||
[conventional-breaking]: https://www.conventionalcommits.org/en/v1.0.0/#commit-message-with-description-and-breaking-change-footer
|
||||
127
.github/workflows/aws_tfhe_fast_tests.yml
vendored
Normal file
127
.github/workflows/aws_tfhe_fast_tests.yml
vendored
Normal file
@@ -0,0 +1,127 @@
|
||||
# Run a small subset of shortint and integer tests to ensure quick feedback.
|
||||
name: Fast AWS Tests on CPU
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
|
||||
on:
|
||||
# Allows you to run this workflow manually from the Actions tab as an alternative.
|
||||
workflow_dispatch:
|
||||
# All the inputs are provided by Slab
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "AWS instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "AWS instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "AWS instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
fast-tests:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
steps:
|
||||
# Step used for log purpose.
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "ID: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
echo "Fork repo: ${{ inputs.fork_repo }}"
|
||||
echo "Fork git sha: ${{ inputs.fork_git_sha }}"
|
||||
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install latest stable
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: stable
|
||||
default: true
|
||||
|
||||
- name: Run concrete-csprng tests
|
||||
run: |
|
||||
make test_concrete_csprng
|
||||
|
||||
- name: Run core tests
|
||||
run: |
|
||||
AVX512_SUPPORT=ON make test_core_crypto
|
||||
|
||||
- name: Run boolean tests
|
||||
run: |
|
||||
make test_boolean
|
||||
|
||||
- name: Run user docs tests
|
||||
run: |
|
||||
make test_user_doc
|
||||
|
||||
- name: Run js on wasm API tests
|
||||
run: |
|
||||
make test_nodejs_wasm_api_in_docker
|
||||
|
||||
- name: Gen Keys if required
|
||||
run: |
|
||||
make gen_key_cache
|
||||
|
||||
- name: Run shortint tests
|
||||
run: |
|
||||
BIG_TESTS_INSTANCE=TRUE FAST_TESTS=TRUE make test_shortint_ci
|
||||
|
||||
- name: Run integer tests
|
||||
run: |
|
||||
BIG_TESTS_INSTANCE=TRUE FAST_TESTS=TRUE make test_integer_ci
|
||||
|
||||
- name: Run shortint multi-bit tests
|
||||
run: |
|
||||
BIG_TESTS_INSTANCE=TRUE FAST_TESTS=TRUE make test_shortint_multi_bit_ci
|
||||
|
||||
- name: Run integer multi-bit tests
|
||||
run: |
|
||||
BIG_TESTS_INSTANCE=TRUE FAST_TESTS=TRUE make test_integer_multi_bit_ci
|
||||
|
||||
- name: Run high-level API tests
|
||||
run: |
|
||||
make test_high_level_api
|
||||
|
||||
- name: Run safe deserialization tests
|
||||
run: |
|
||||
make test_safe_deserialization
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Fast AWS tests finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
31
.github/workflows/aws_tfhe_integer_tests.yml
vendored
31
.github/workflows/aws_tfhe_integer_tests.yml
vendored
@@ -25,26 +25,35 @@ on:
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
matrix_item:
|
||||
description: 'Build matrix item'
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
integer-tests:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ github.event.inputs.instance_image_id }}_${{ github.event.inputs.instance_type }}
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
steps:
|
||||
# Step used for log purpose.
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "ID: ${{ github.event.inputs.instance_id }}"
|
||||
echo "AMI: ${{ github.event.inputs.instance_image_id }}"
|
||||
echo "Type: ${{ github.event.inputs.instance_type }}"
|
||||
echo "Request ID: ${{ github.event.inputs.request_id }}"
|
||||
echo "ID: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
echo "Fork repo: ${{ inputs.fork_repo }}"
|
||||
echo "Fork git sha: ${{ inputs.fork_git_sha }}"
|
||||
|
||||
- uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
@@ -62,12 +71,12 @@ jobs:
|
||||
|
||||
- name: Run integer tests
|
||||
run: |
|
||||
BIG_TESTS_INSTANCE=TRUE make test_integer_ci
|
||||
AVX512_SUPPORT=ON BIG_TESTS_INSTANCE=TRUE make test_integer_ci
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
|
||||
90
.github/workflows/aws_tfhe_multi_bit_tests.yml
vendored
Normal file
90
.github/workflows/aws_tfhe_multi_bit_tests.yml
vendored
Normal file
@@ -0,0 +1,90 @@
|
||||
name: AWS Multi Bit Tests on CPU
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
|
||||
on:
|
||||
# Allows you to run this workflow manually from the Actions tab as an alternative.
|
||||
workflow_dispatch:
|
||||
# All the inputs are provided by Slab
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "AWS instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "AWS instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "AWS instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
multi-bit-tests:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
steps:
|
||||
# Step used for log purpose.
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "ID: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
echo "Fork repo: ${{ inputs.fork_repo }}"
|
||||
echo "Fork git sha: ${{ inputs.fork_git_sha }}"
|
||||
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install latest stable
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: stable
|
||||
default: true
|
||||
|
||||
- name: Gen Keys if required
|
||||
run: |
|
||||
make GEN_KEY_CACHE_MULTI_BIT_ONLY=TRUE gen_key_cache
|
||||
|
||||
- name: Run shortint multi-bit tests
|
||||
run: |
|
||||
make test_shortint_multi_bit_ci
|
||||
|
||||
- name: Run integer multi-bit tests
|
||||
run: |
|
||||
AVX512_SUPPORT=ON make test_integer_multi_bit_ci
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Shortint tests finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
41
.github/workflows/aws_tfhe_tests.yml
vendored
41
.github/workflows/aws_tfhe_tests.yml
vendored
@@ -25,26 +25,35 @@ on:
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
matrix_item:
|
||||
description: 'Build matrix item'
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
shortint-tests:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ github.event.inputs.instance_image_id }}_${{ github.event.inputs.instance_type }}
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
steps:
|
||||
# Step used for log purpose.
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "ID: ${{ github.event.inputs.instance_id }}"
|
||||
echo "AMI: ${{ github.event.inputs.instance_image_id }}"
|
||||
echo "Type: ${{ github.event.inputs.instance_type }}"
|
||||
echo "Request ID: ${{ github.event.inputs.request_id }}"
|
||||
echo "ID: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
echo "Fork repo: ${{ inputs.fork_repo }}"
|
||||
echo "Fork git sha: ${{ inputs.fork_git_sha }}"
|
||||
|
||||
- uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
@@ -56,6 +65,10 @@ jobs:
|
||||
toolchain: stable
|
||||
default: true
|
||||
|
||||
- name: Run concrete-csprng tests
|
||||
run: |
|
||||
make test_concrete_csprng
|
||||
|
||||
- name: Run core tests
|
||||
run: |
|
||||
AVX512_SUPPORT=ON make test_core_crypto
|
||||
@@ -72,10 +85,6 @@ jobs:
|
||||
run: |
|
||||
make test_user_doc
|
||||
|
||||
- name: Run js on wasm API tests
|
||||
run: |
|
||||
make test_nodejs_wasm_api_in_docker
|
||||
|
||||
- name: Gen Keys if required
|
||||
run: |
|
||||
make gen_key_cache
|
||||
@@ -88,10 +97,14 @@ jobs:
|
||||
run: |
|
||||
BIG_TESTS_INSTANCE=TRUE make test_high_level_api
|
||||
|
||||
- name: Run example tests
|
||||
run: |
|
||||
make test_examples
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
|
||||
87
.github/workflows/aws_tfhe_wasm_tests.yml
vendored
Normal file
87
.github/workflows/aws_tfhe_wasm_tests.yml
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
name: AWS WASM Tests on CPU
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
|
||||
on:
|
||||
# Allows you to run this workflow manually from the Actions tab as an alternative.
|
||||
workflow_dispatch:
|
||||
# All the inputs are provided by Slab
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "AWS instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "AWS instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "AWS instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
wasm-tests:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
steps:
|
||||
# Step used for log purpose.
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "ID: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
echo "Fork repo: ${{ inputs.fork_repo }}"
|
||||
echo "Fork git sha: ${{ inputs.fork_git_sha }}"
|
||||
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install latest stable
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: stable
|
||||
default: true
|
||||
|
||||
- name: Run js on wasm API tests
|
||||
run: |
|
||||
make test_nodejs_wasm_api_in_docker
|
||||
|
||||
- name: Run parallel wasm tests
|
||||
run: |
|
||||
make install_node
|
||||
make ci_test_web_js_api_parallel
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "WASM tests finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
63
.github/workflows/boolean_benchmark.yml
vendored
63
.github/workflows/boolean_benchmark.yml
vendored
@@ -5,24 +5,33 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: 'Instance ID'
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: 'Instance AMI ID'
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: 'Instance product type'
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: 'Action runner name'
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
# This input is not used in this workflow but still mandatory since a calling workflow could
|
||||
# use it. If a triggering command include a user_inputs field, then the triggered workflow
|
||||
# must include this very input, otherwise the workflow won't be called.
|
||||
# See start_full_benchmarks.yml as example.
|
||||
user_inputs:
|
||||
description: "Type of benchmarks to run"
|
||||
type: string
|
||||
default: "weekly_benchmarks"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
run-boolean-benchmarks:
|
||||
@@ -42,7 +51,7 @@ jobs:
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -57,9 +66,9 @@ jobs:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Run benchmarks
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make bench_boolean
|
||||
make AVX512_SUPPORT=ON bench_boolean
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
@@ -73,24 +82,8 @@ jobs:
|
||||
--commit-date "${COMMIT_DATE}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--walk-subdirs \
|
||||
--throughput
|
||||
|
||||
- name: Remove previous raw results
|
||||
run: |
|
||||
rm -rf target/criterion
|
||||
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make AVX512_SUPPORT=ON bench_boolean
|
||||
|
||||
- name: Parse AVX512 results
|
||||
run: |
|
||||
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--name-suffix avx512 \
|
||||
--walk-subdirs \
|
||||
--throughput \
|
||||
--append-results
|
||||
--throughput
|
||||
|
||||
- name: Measure key sizes
|
||||
run: |
|
||||
@@ -101,15 +94,15 @@ jobs:
|
||||
python3 ./ci/benchmark_parser.py tfhe/boolean_key_sizes.csv ${{ env.RESULTS_FILENAME }} \
|
||||
--key-sizes \
|
||||
--append-results
|
||||
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_boolean
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
@@ -117,8 +110,6 @@ jobs:
|
||||
|
||||
- name: Send data to Slab
|
||||
shell: bash
|
||||
env:
|
||||
COMPRESSED_RESULTS : ${{ env.RESULTS_FILENAME }}.gz
|
||||
run: |
|
||||
echo "Computing HMac on results file"
|
||||
SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')"
|
||||
@@ -130,3 +121,15 @@ jobs:
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Boolean benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
|
||||
16
.github/workflows/cargo_build.yml
vendored
16
.github/workflows/cargo_build.yml
vendored
@@ -21,12 +21,26 @@ jobs:
|
||||
fail-fast: false
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
|
||||
- name: Install and run newline linter checks
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: |
|
||||
wget https://github.com/fernandrone/linelint/releases/download/0.0.6/linelint-linux-amd64
|
||||
echo "16b70fb7b471d6f95cbdc0b4e5dc2b0ac9e84ba9ecdc488f7bdf13df823aca4b linelint-linux-amd64" > checksum
|
||||
sha256sum -c checksum || exit 1
|
||||
chmod +x linelint-linux-amd64
|
||||
mv linelint-linux-amd64 /usr/local/bin/linelint
|
||||
make check_newline
|
||||
|
||||
- name: Run pcc checks
|
||||
run: |
|
||||
make pcc
|
||||
|
||||
- name: Build concrete-csprng
|
||||
run: |
|
||||
make build_concrete_csprng
|
||||
|
||||
- name: Build Release core
|
||||
run: |
|
||||
make build_core AVX512_SUPPORT=ON
|
||||
|
||||
2
.github/workflows/check_commit.yml
vendored
2
.github/workflows/check_commit.yml
vendored
@@ -10,7 +10,7 @@ jobs:
|
||||
- name: Check first line
|
||||
uses: gsactions/commit-message-checker@16fa2d5de096ae0d35626443bcd24f1e756cafee
|
||||
with:
|
||||
pattern: '^((feat|fix|chore|refactor|style|test|docs|doc)\(\w+\)\:) .+$'
|
||||
pattern: '^((feat|fix|chore|refactor|style|test|docs|doc)(\(\w+\))?\:) .+$'
|
||||
flags: "gs"
|
||||
error: 'Your first line has to contain a commit type and scope like "feat(my_feature): msg".'
|
||||
excludeDescription: "true" # optional: this excludes the description body of a pull request
|
||||
|
||||
112
.github/workflows/code_coverage.yml
vendored
Normal file
112
.github/workflows/code_coverage.yml
vendored
Normal file
@@ -0,0 +1,112 @@
|
||||
name: Code Coverage
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
|
||||
on:
|
||||
# Allows you to run this workflow manually from the Actions tab as an alternative.
|
||||
workflow_dispatch:
|
||||
# All the inputs are provided by Slab
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "AWS instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "AWS instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "AWS instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
code-coverage:
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
steps:
|
||||
# Step used for log purpose.
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "ID: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
echo "Fork repo: ${{ inputs.fork_repo }}"
|
||||
echo "Fork git sha: ${{ inputs.fork_git_sha }}"
|
||||
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install latest stable
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: stable
|
||||
default: true
|
||||
|
||||
- name: Check for file changes
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@408093d9ff9c134c33b974e0722ce06b9d6e8263
|
||||
with:
|
||||
files_yaml: |
|
||||
tfhe:
|
||||
- tfhe/src/**
|
||||
concrete_csprng:
|
||||
- concrete-csprng/src/**
|
||||
|
||||
- name: Generate Keys
|
||||
if: steps.changed-files.outputs.tfhe_any_changed == 'true'
|
||||
run: |
|
||||
make GEN_KEY_CACHE_COVERAGE_ONLY=TRUE gen_key_cache
|
||||
|
||||
- name: Run coverage for boolean
|
||||
if: steps.changed-files.outputs.tfhe_any_changed == 'true'
|
||||
run: |
|
||||
make test_boolean_cov
|
||||
|
||||
- name: Run coverage for shortint
|
||||
if: steps.changed-files.outputs.tfhe_any_changed == 'true'
|
||||
run: |
|
||||
make test_shortint_cov
|
||||
|
||||
- name: Upload tfhe coverage to Codecov
|
||||
uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d
|
||||
if: steps.changed-files.outputs.tfhe_any_changed == 'true'
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
directory: ./coverage/
|
||||
fail_ci_if_error: true
|
||||
files: shortint/cobertura.xml,boolean/cobertura.xml
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Code coverage finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
74
.github/workflows/csprng_randomness_testing.yml
vendored
Normal file
74
.github/workflows/csprng_randomness_testing.yml
vendored
Normal file
@@ -0,0 +1,74 @@
|
||||
name: CSPRNG randomness testing Workflow
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
|
||||
on:
|
||||
# Allows you to run this workflow manually from the Actions tab as an alternative.
|
||||
workflow_dispatch:
|
||||
# All the inputs are provided by Slab
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "AWS instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "AWS instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "AWS instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
type: string
|
||||
fork_repo:
|
||||
description: 'Name of forked repo as user/repo'
|
||||
type: string
|
||||
fork_git_sha:
|
||||
description: 'Git SHA to checkout from fork'
|
||||
type: string
|
||||
|
||||
jobs:
|
||||
csprng-randomness-teting:
|
||||
name: CSPRNG randomness testing
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}_${{ github.ref }}_${{ inputs.instance_image_id }}_${{ inputs.instance_type }}
|
||||
cancel-in-progress: true
|
||||
runs-on: ${{ inputs.runner_name }}
|
||||
|
||||
steps:
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: ${{ inputs.fork_repo }}
|
||||
ref: ${{ inputs.fork_git_sha }}
|
||||
|
||||
- name: Set up home
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install latest stable
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: stable
|
||||
default: true
|
||||
|
||||
- name: Dieharder randomness test suite
|
||||
run: |
|
||||
make dieharder_csprng
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "concrete-csprng randomness check finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
63
.github/workflows/integer_benchmark.yml
vendored
63
.github/workflows/integer_benchmark.yml
vendored
@@ -5,24 +5,26 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: 'Instance ID'
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: 'Instance AMI ID'
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: 'Instance product type'
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: 'Action runner name'
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
PARSE_INTEGER_BENCH_CSV_FILE: tfhe_rs_integer_benches_${{ github.sha }}.csv
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
run-integer-benchmarks:
|
||||
@@ -42,7 +44,7 @@ jobs:
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -57,9 +59,20 @@ jobs:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Run benchmarks
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make bench_integer
|
||||
make AVX512_SUPPORT=ON FAST_BENCH=TRUE bench_integer
|
||||
|
||||
- name: Parse benchmarks to csv
|
||||
run: |
|
||||
make PARSE_INTEGER_BENCH_CSV_FILE=${{ env.PARSE_INTEGER_BENCH_CSV_FILE }} \
|
||||
parse_integer_benches
|
||||
|
||||
- name: Upload csv results artifact
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_csv_integer
|
||||
path: ${{ env.PARSE_INTEGER_BENCH_CSV_FILE }}
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
@@ -73,33 +86,17 @@ jobs:
|
||||
--commit-date "${COMMIT_DATE}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--walk-subdirs \
|
||||
--name-suffix avx512 \
|
||||
--throughput
|
||||
|
||||
- name: Remove previous raw results
|
||||
run: |
|
||||
rm -rf target/criterion
|
||||
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make AVX512_SUPPORT=ON bench_integer
|
||||
|
||||
- name: Parse AVX512 results
|
||||
run: |
|
||||
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--walk-subdirs \
|
||||
--name-suffix avx512 \
|
||||
--throughput \
|
||||
--append-results
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_integer
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
@@ -118,3 +115,15 @@ jobs:
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Integer benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
|
||||
157
.github/workflows/integer_full_benchmark.yml
vendored
Normal file
157
.github/workflows/integer_full_benchmark.yml
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
# Run all integer benchmarks on an AWS instance and return parsed results to Slab CI bot.
|
||||
name: Integer full benchmarks
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
user_inputs:
|
||||
description: "Type of benchmarks to run"
|
||||
type: string
|
||||
default: "weekly_benchmarks"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
prepare-matrix:
|
||||
name: Prepare operations matrix
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
op_flavor: ${{ steps.set_op_flavor.outputs.op_flavor }}
|
||||
steps:
|
||||
- name: Weekly benchmarks
|
||||
if: ${{ github.event.inputs.user_inputs == 'weekly_benchmarks' }}
|
||||
run: |
|
||||
echo "OP_FLAVOR=[\"default\", \"default_comp\", \"default_scalar\", \"default_scalar_comp\"]" >> ${GITHUB_ENV}
|
||||
|
||||
- name: Quarterly benchmarks
|
||||
if: ${{ github.event.inputs.user_inputs == 'quarterly_benchmarks' }}
|
||||
run: |
|
||||
echo "OP_FLAVOR=[\"default\", \"default_comp\", \"default_scalar\", \"default_scalar_comp\", \
|
||||
\"smart\", \"smart_comp\", \"smart_scalar\", \"smart_parallelized\", \"smart_parallelized_comp\", \"smart_scalar_parallelized\", \"smart_scalar_parallelized_comp\", \
|
||||
\"unchecked\", \"unchecked_comp\", \"unchecked_scalar\", \"unchecked_scalar_comp\", \
|
||||
\"misc\"]" >> ${GITHUB_ENV}
|
||||
|
||||
- name: Set operation flavor output
|
||||
id: set_op_flavor
|
||||
run: |
|
||||
echo "op_flavor=${{ toJSON(env.OP_FLAVOR) }}" >> ${GITHUB_OUTPUT}
|
||||
|
||||
integer-benchmarks:
|
||||
name: Execute integer benchmarks for all operations flavor
|
||||
needs: prepare-matrix
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
if: ${{ !cancelled() }}
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
max-parallel: 1
|
||||
matrix:
|
||||
command: [ integer, integer_multi_bit]
|
||||
op_flavor: ${{ fromJson(needs.prepare-matrix.outputs.op_flavor) }}
|
||||
steps:
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "IDs: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get benchmark details
|
||||
run: |
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
echo "COMMIT_DATE=$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})" >> "${GITHUB_ENV}"
|
||||
echo "COMMIT_HASH=$(git describe --tags --dirty)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Set up home
|
||||
# "Install rust" step require root user to have a HOME directory which is not set.
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
|
||||
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make AVX512_SUPPORT=ON BENCH_OP_FLAVOR=${{ matrix.op_flavor }} bench_${{ matrix.command }}
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
|
||||
--database tfhe_rs \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--project-version "${{ env.COMMIT_HASH }}" \
|
||||
--branch ${{ github.ref_name }} \
|
||||
--commit-date "${{ env.COMMIT_DATE }}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--walk-subdirs \
|
||||
--name-suffix avx512 \
|
||||
--throughput
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_${{ matrix.command }}_${{ matrix.op_flavor }}
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Send data to Slab
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Computing HMac on results file"
|
||||
SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')"
|
||||
echo "Sending results to Slab..."
|
||||
curl -v -k \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Slab-Repository: ${{ github.repository }}" \
|
||||
-H "X-Slab-Command: store_data_v2" \
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
slack-notification:
|
||||
name: Slack Notification
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
if: ${{ failure() }}
|
||||
needs: integer-benchmarks
|
||||
steps:
|
||||
- name: Notify
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Integer full benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
129
.github/workflows/integer_multi_bit_benchmark.yml
vendored
Normal file
129
.github/workflows/integer_multi_bit_benchmark.yml
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
# Run integer benchmarks with multi-bit cryptographic parameters on an AWS instance and return parsed results to Slab CI bot.
|
||||
name: Integer Multi-bit benchmarks
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
PARSE_INTEGER_BENCH_CSV_FILE: tfhe_rs_integer_benches_${{ github.sha }}.csv
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
run-integer-benchmarks:
|
||||
name: Execute integer multi-bit benchmarks in EC2
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
if: ${{ !cancelled() }}
|
||||
steps:
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "IDs: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
|
||||
- name: Get benchmark date
|
||||
run: |
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up home
|
||||
# "Install rust" step require root user to have a HOME directory which is not set.
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Run multi-bit benchmarks with AVX512
|
||||
run: |
|
||||
make AVX512_SUPPORT=ON FAST_BENCH=TRUE bench_integer_multi_bit
|
||||
|
||||
- name: Parse benchmarks to csv
|
||||
run: |
|
||||
make PARSE_INTEGER_BENCH_CSV_FILE=${{ env.PARSE_INTEGER_BENCH_CSV_FILE }} \
|
||||
parse_integer_benches
|
||||
|
||||
- name: Upload csv results artifact
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_csv_integer
|
||||
path: ${{ env.PARSE_INTEGER_BENCH_CSV_FILE }}
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
COMMIT_DATE="$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})"
|
||||
COMMIT_HASH="$(git describe --tags --dirty)"
|
||||
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
|
||||
--database tfhe_rs \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--project-version "${COMMIT_HASH}" \
|
||||
--branch ${{ github.ref_name }} \
|
||||
--commit-date "${COMMIT_DATE}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--walk-subdirs \
|
||||
--name-suffix avx512 \
|
||||
--throughput
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_integer
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
|
||||
|
||||
- name: Send data to Slab
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Computing HMac on results file"
|
||||
SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')"
|
||||
echo "Sending results to Slab..."
|
||||
curl -v -k \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Slab-Repository: ${{ github.repository }}" \
|
||||
-H "X-Slab-Command: store_data_v2" \
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Integer benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
36
.github/workflows/m1_tests.yml
vendored
36
.github/workflows/m1_tests.yml
vendored
@@ -4,11 +4,19 @@ on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
# Have a nightly build for M1 tests
|
||||
schedule:
|
||||
# * is a special character in YAML so you have to quote this string
|
||||
# At 22:00 every day
|
||||
# Timezone is UTC, so Paris time is +2 during the summer and +1 during winter
|
||||
- cron: "0 22 * * *"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
CARGO_PROFILE: release_lto_off
|
||||
FAST_TESTS: "TRUE"
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref }}
|
||||
@@ -16,11 +24,11 @@ concurrency:
|
||||
|
||||
jobs:
|
||||
cargo-builds:
|
||||
if: "github.event_name != 'pull_request' || contains(github.event.label.name, 'm1_test')"
|
||||
if: ${{ (github.event_name == 'schedule' && github.repository == 'zama-ai/tfhe-rs') || github.event_name == 'workflow_dispatch' || contains(github.event.label.name, 'm1_test') }}
|
||||
runs-on: ["self-hosted", "m1mac"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
- uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
|
||||
- name: Install latest stable
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
@@ -32,6 +40,10 @@ jobs:
|
||||
run: |
|
||||
make pcc
|
||||
|
||||
- name: Build concrete-csprng
|
||||
run: |
|
||||
make build_concrete_csprng
|
||||
|
||||
- name: Build Release core
|
||||
run: |
|
||||
make build_core
|
||||
@@ -56,6 +68,10 @@ jobs:
|
||||
run: |
|
||||
make build_c_api
|
||||
|
||||
- name: Run concrete-csprng tests
|
||||
run: |
|
||||
make test_concrete_csprng
|
||||
|
||||
- name: Run core tests
|
||||
run: |
|
||||
make test_core_crypto
|
||||
@@ -87,6 +103,19 @@ jobs:
|
||||
run: |
|
||||
make test_integer_ci
|
||||
|
||||
- name: Gen Keys if required
|
||||
run: |
|
||||
make GEN_KEY_CACHE_MULTI_BIT_ONLY=TRUE gen_key_cache
|
||||
|
||||
- name: Run shortint multi bit tests
|
||||
run: |
|
||||
make test_shortint_multi_bit_ci
|
||||
|
||||
# # These multi bit integer tests are too slow on M1 with low core count and low RAM
|
||||
# - name: Run integer multi bit tests
|
||||
# run: |
|
||||
# make test_integer_multi_bit_ci
|
||||
|
||||
remove_label:
|
||||
name: Remove m1_test label
|
||||
runs-on: ubuntu-latest
|
||||
@@ -95,6 +124,7 @@ jobs:
|
||||
if: ${{ always() }}
|
||||
steps:
|
||||
- uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
with:
|
||||
labels: m1_test
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -102,7 +132,7 @@ jobs:
|
||||
- name: Slack Notification
|
||||
if: ${{ needs.cargo-builds.result != 'skipped' }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ needs.cargo-builds.result }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
|
||||
84
.github/workflows/make_release.yml
vendored
Normal file
84
.github/workflows/make_release.yml
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
# Publish new release of tfhe-rs on various platform.
|
||||
name: Publish release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dry_run:
|
||||
description: "Dry-run"
|
||||
type: boolean
|
||||
default: true
|
||||
push_to_crates:
|
||||
description: "Push to crate"
|
||||
type: boolean
|
||||
default: true
|
||||
push_web_package:
|
||||
description: "Push web js package"
|
||||
type: boolean
|
||||
default: true
|
||||
push_node_package:
|
||||
description: "Push node js package"
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
env:
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
publish_release:
|
||||
name: Publish Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Publish crate.io package
|
||||
if: ${{ inputs.push_to_crates }}
|
||||
env:
|
||||
CRATES_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
||||
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
|
||||
run: |
|
||||
cargo publish -p tfhe --token ${{ env.CRATES_TOKEN }} ${{ env.DRY_RUN }}
|
||||
|
||||
- name: Build web package
|
||||
if: ${{ inputs.push_web_package }}
|
||||
run: |
|
||||
make build_web_js_api
|
||||
|
||||
- name: Publish web package
|
||||
if: ${{ inputs.push_web_package }}
|
||||
uses: JS-DevTools/npm-publish@fe72237be0920f7a0cafd6a966c9b929c9466e9b
|
||||
with:
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
package: tfhe/pkg/package.json
|
||||
dry-run: ${{ inputs.dry_run }}
|
||||
|
||||
- name: Build Node package
|
||||
if: ${{ inputs.push_node_package }}
|
||||
run: |
|
||||
rm -rf tfhe/pkg
|
||||
|
||||
make build_node_js_api
|
||||
sed -i 's/"tfhe"/"node-tfhe"/g' tfhe/pkg/package.json
|
||||
|
||||
- name: Publish Node package
|
||||
if: ${{ inputs.push_node_package }}
|
||||
uses: JS-DevTools/npm-publish@fe72237be0920f7a0cafd6a966c9b929c9466e9b
|
||||
with:
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
package: tfhe/pkg/package.json
|
||||
dry-run: ${{ inputs.dry_run }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "tfhe release failed: (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
42
.github/workflows/make_release_concrete_csprng.yml
vendored
Normal file
42
.github/workflows/make_release_concrete_csprng.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
# Publish new release of tfhe-rs on various platform.
|
||||
name: Publish concrete-csprng release
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
dry_run:
|
||||
description: "Dry-run"
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
env:
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
publish_release:
|
||||
name: Publish concrete-csprng Release
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Publish crate.io package
|
||||
env:
|
||||
CRATES_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
||||
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
|
||||
run: |
|
||||
cargo publish -p concrete-csprng --token ${{ env.CRATES_TOKEN }} ${{ env.DRY_RUN }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "concrete-csprng release failed: (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
51
.github/workflows/parameters_check.yml
vendored
Normal file
51
.github/workflows/parameters_check.yml
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
# Perform a security check on all the cryptographic parameters set
|
||||
name: Parameters curves security check
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
RUSTFLAGS: "-C target-cpu=native"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "main"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
params-curves-security-check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
|
||||
- name: Checkout lattice-estimator
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: malb/lattice-estimator
|
||||
path: lattice_estimator
|
||||
|
||||
- name: Install Sage
|
||||
run: |
|
||||
sudo apt update
|
||||
sudo apt install -y sagemath
|
||||
|
||||
- name: Collect parameters
|
||||
run: |
|
||||
CARGO_PROFILE=devo make write_params_to_file
|
||||
|
||||
- name: Perform security check
|
||||
run: |
|
||||
PYTHONPATH=lattice_estimator sage ci/lattice_estimator.sage
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ always() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Security check for parameters finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
38
.github/workflows/pbs_benchmark.yml
vendored
38
.github/workflows/pbs_benchmark.yml
vendored
@@ -5,25 +5,33 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: 'Instance ID'
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: 'Instance AMI ID'
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: 'Instance product type'
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: 'Action runner name'
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
|
||||
# This input is not used in this workflow but still mandatory since a calling workflow could
|
||||
# use it. If a triggering command include a user_inputs field, then the triggered workflow
|
||||
# must include this very input, otherwise the workflow won't be called.
|
||||
# See start_full_benchmarks.yml as example.
|
||||
user_inputs:
|
||||
description: "Type of benchmarks to run"
|
||||
type: string
|
||||
default: "weekly_benchmarks"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
run-pbs-benchmarks:
|
||||
@@ -43,7 +51,7 @@ jobs:
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -78,13 +86,13 @@ jobs:
|
||||
--throughput
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_pbs
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
@@ -103,3 +111,15 @@ jobs:
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "PBS benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
|
||||
14
.github/workflows/placeholder_workflow.yml
vendored
Normal file
14
.github/workflows/placeholder_workflow.yml
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
# Placeholder workflow file allowing running it without having to merge to main first
|
||||
name: Placeholder Workflow
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
placeholder:
|
||||
name: Placeholder
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- run: |
|
||||
echo "Hello this is a Placeholder Workflow"
|
||||
52
.github/workflows/shortint_benchmark.yml
vendored
52
.github/workflows/shortint_benchmark.yml
vendored
@@ -5,25 +5,25 @@ on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: 'Instance ID'
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: 'Instance AMI ID'
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: 'Instance product type'
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: 'Action runner name'
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: 'Slab request ID'
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
run-shortint-benchmarks:
|
||||
@@ -43,7 +43,7 @@ jobs:
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -58,9 +58,9 @@ jobs:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Run benchmarks
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make bench_shortint
|
||||
make AVX512_SUPPORT=ON bench_shortint
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
@@ -74,24 +74,8 @@ jobs:
|
||||
--commit-date "${COMMIT_DATE}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--walk-subdirs \
|
||||
--throughput
|
||||
|
||||
- name: Remove previous raw results
|
||||
run: |
|
||||
rm -rf target/criterion
|
||||
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make AVX512_SUPPORT=ON bench_shortint
|
||||
|
||||
- name: Parse AVX512 results
|
||||
run: |
|
||||
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--walk-subdirs \
|
||||
--name-suffix avx512 \
|
||||
--throughput \
|
||||
--append-results
|
||||
--throughput
|
||||
|
||||
- name: Measure key sizes
|
||||
run: |
|
||||
@@ -104,13 +88,13 @@ jobs:
|
||||
--append-results
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_shortint
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
@@ -129,3 +113,15 @@ jobs:
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Shortint benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
|
||||
149
.github/workflows/shortint_full_benchmark.yml
vendored
Normal file
149
.github/workflows/shortint_full_benchmark.yml
vendored
Normal file
@@ -0,0 +1,149 @@
|
||||
# Run all shortint benchmarks on an AWS instance and return parsed results to Slab CI bot.
|
||||
name: Shortint full benchmarks
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
# This input is not used in this workflow but still mandatory since a calling workflow could
|
||||
# use it. If a triggering command include a user_inputs field, then the triggered workflow
|
||||
# must include this very input, otherwise the workflow won't be called.
|
||||
# See start_full_benchmarks.yml as example.
|
||||
user_inputs:
|
||||
description: "Type of benchmarks to run"
|
||||
type: string
|
||||
default: "weekly_benchmarks"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
shortint-benchmarks:
|
||||
name: Execute shortint benchmarks for all operations flavor
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
if: ${{ !cancelled() }}
|
||||
strategy:
|
||||
max-parallel: 1
|
||||
matrix:
|
||||
op_flavor: [ default, smart, unchecked ]
|
||||
steps:
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "IDs: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get benchmark details
|
||||
run: |
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
echo "COMMIT_DATE=$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})" >> "${GITHUB_ENV}"
|
||||
echo "COMMIT_HASH=$(git describe --tags --dirty)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Set up home
|
||||
# "Install rust" step require root user to have a HOME directory which is not set.
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
|
||||
|
||||
- name: Run benchmarks with AVX512
|
||||
run: |
|
||||
make AVX512_SUPPORT=ON BENCH_OP_FLAVOR=${{ matrix.op_flavor }} bench_shortint
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
COMMIT_DATE="$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})"
|
||||
COMMIT_HASH="$(git describe --tags --dirty)"
|
||||
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
|
||||
--database tfhe_rs \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--project-version "${COMMIT_HASH}" \
|
||||
--branch ${{ github.ref_name }} \
|
||||
--commit-date "${COMMIT_DATE}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--walk-subdirs \
|
||||
--name-suffix avx512 \
|
||||
--throughput
|
||||
|
||||
# This small benchmark needs to be executed only once.
|
||||
- name: Measure key sizes
|
||||
if: matrix.op_flavor == 'default'
|
||||
run: |
|
||||
make measure_shortint_key_sizes
|
||||
|
||||
- name: Parse key sizes results
|
||||
if: matrix.op_flavor == 'default'
|
||||
run: |
|
||||
python3 ./ci/benchmark_parser.py tfhe/shortint_key_sizes.csv ${{ env.RESULTS_FILENAME }} \
|
||||
--key-sizes \
|
||||
--append-results
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_shortint_${{ matrix.op_flavor }}
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Send data to Slab
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Computing HMac on results file"
|
||||
SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')"
|
||||
echo "Sending results to Slab..."
|
||||
curl -v -k \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Slab-Repository: ${{ github.repository }}" \
|
||||
-H "X-Slab-Command: store_data_v2" \
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
slack-notification:
|
||||
name: Slack Notification
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
if: ${{ failure() }}
|
||||
needs: shortint-benchmarks
|
||||
steps:
|
||||
- name: Notify
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "Shortint full benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
79
.github/workflows/start_benchmarks.yml
vendored
79
.github/workflows/start_benchmarks.yml
vendored
@@ -4,24 +4,97 @@ name: Start all benchmarks
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'main'
|
||||
- "main"
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
# The input name must be the name of the slab command to launch
|
||||
boolean_bench:
|
||||
description: "Run Boolean benches"
|
||||
type: boolean
|
||||
default: true
|
||||
shortint_bench:
|
||||
description: "Run shortint benches"
|
||||
type: boolean
|
||||
default: true
|
||||
integer_bench:
|
||||
description: "Run integer benches"
|
||||
type: boolean
|
||||
default: true
|
||||
integer_multi_bit_bench:
|
||||
description: "Run integer multi bit benches"
|
||||
type: boolean
|
||||
default: true
|
||||
pbs_bench:
|
||||
description: "Run PBS benches"
|
||||
type: boolean
|
||||
default: true
|
||||
wasm_client_bench:
|
||||
description: "Run WASM client benches"
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
jobs:
|
||||
start-benchmarks:
|
||||
if: ${{ (github.event_name == 'push' && github.repository == 'zama-ai/tfhe-rs') || github.event_name == 'workflow_dispatch' }}
|
||||
strategy:
|
||||
matrix:
|
||||
command: [boolean_bench, shortint_bench, integer_bench, pbs_bench]
|
||||
command: [boolean_bench, shortint_bench, integer_bench, integer_multi_bit_bench, pbs_bench, wasm_client_bench]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check for file changes
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@408093d9ff9c134c33b974e0722ce06b9d6e8263
|
||||
with:
|
||||
files_yaml: |
|
||||
common_benches:
|
||||
- toolchain.txt
|
||||
- Makefile
|
||||
- ci/slab.toml
|
||||
- tfhe/Cargo.toml
|
||||
- tfhe/src/core_crypto/**
|
||||
- .github/workflows/start_benchmarks.yml
|
||||
boolean_bench:
|
||||
- tfhe/src/boolean/**
|
||||
- tfhe/benches/boolean/**
|
||||
- .github/workflows/boolean_benchmark.yml
|
||||
shortint_bench:
|
||||
- tfhe/src/shortint/**
|
||||
- tfhe/benches/shortint/**
|
||||
- .github/workflows/shortint_benchmark.yml
|
||||
integer_bench:
|
||||
- tfhe/src/shortint/**
|
||||
- tfhe/src/integer/**
|
||||
- tfhe/benches/integer/**
|
||||
- .github/workflows/integer_benchmark.yml
|
||||
integer_multi_bit_bench:
|
||||
- tfhe/src/shortint/**
|
||||
- tfhe/src/integer/**
|
||||
- tfhe/benches/integer/**
|
||||
- .github/workflows/integer_benchmark.yml
|
||||
pbs_bench:
|
||||
- tfhe/src/core_crypto/**
|
||||
- tfhe/benches/core_crypto/**
|
||||
- .github/workflows/pbs_benchmark.yml
|
||||
wasm_client_bench:
|
||||
- tfhe/web_wasm_parallel_tests/**
|
||||
- .github/workflows/wasm_client_benchmark.yml
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
|
||||
|
||||
- name: Start AWS job in Slab
|
||||
# If manually triggered check that the current bench has been requested
|
||||
# Otherwise if it's on push check that files relevant to benchmarks have changed
|
||||
if: (github.event_name == 'workflow_dispatch' && github.event.inputs[matrix.command] == 'true') || (github.event_name == 'push' && (steps.changed-files.outputs.common_benches_any_changed == 'true' || steps.changed-files.outputs[format('{0}_any_changed', matrix.command)] == 'true'))
|
||||
shell: bash
|
||||
run: |
|
||||
echo -n '{"command": "${{ matrix.command }}", "git_ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}' > command.json
|
||||
|
||||
64
.github/workflows/start_full_benchmarks.yml
vendored
Normal file
64
.github/workflows/start_full_benchmarks.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
# Start all benchmark jobs, including full shortint and integer, on Slab CI bot.
|
||||
name: Start full suite benchmarks
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Weekly benchmarks will be triggered each Saturday at 1a.m.
|
||||
- cron: '0 1 * * 6'
|
||||
# Quarterly benchmarks will be triggered right before end of quarter, the 25th of the current month at 4a.m.
|
||||
# These benchmarks are far longer to execute hence the reason to run them only four time a year.
|
||||
- cron: '0 4 25 MAR,JUN,SEP,DEC *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
benchmark_type:
|
||||
description: 'Benchmark type'
|
||||
required: true
|
||||
default: 'weekly'
|
||||
type: choice
|
||||
options:
|
||||
- weekly
|
||||
- quarterly
|
||||
|
||||
jobs:
|
||||
start-benchmarks:
|
||||
if: ${{ (github.event_name == 'schedule' && github.repository == 'zama-ai/tfhe-rs') || github.event_name == 'workflow_dispatch' }}
|
||||
strategy:
|
||||
matrix:
|
||||
command: [ boolean_bench, shortint_full_bench, integer_full_bench, pbs_bench, wasm_client_bench ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout tfhe-rs
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
|
||||
|
||||
- name: Set benchmarks type as weekly
|
||||
if: (github.event_name == 'workflow_dispatch' && inputs.benchmark_type == 'weekly') || github.event.schedule == '0 1 * * 6'
|
||||
run: |
|
||||
echo "BENCH_TYPE=weekly_benchmarks" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Set benchmarks type as quarterly
|
||||
if: (github.event_name == 'workflow_dispatch' && inputs.benchmark_type == 'quarterly') || github.event.schedule == '0 4 25 MAR,JUN,SEP,DEC *'
|
||||
run: |
|
||||
echo "BENCH_TYPE=quarterly_benchmarks" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Start AWS job in Slab
|
||||
shell: bash
|
||||
run: |
|
||||
echo -n '{"command": "${{ matrix.command }}", "git_ref": "${{ github.ref }}", "sha": "${{ github.sha }}", "user_inputs": "${{ env.BENCH_TYPE }}"}' > command.json
|
||||
SIGNATURE="$(slab/scripts/hmac_calculator.sh command.json '${{ secrets.JOB_SECRET }}')"
|
||||
curl -v -k \
|
||||
--fail-with-body \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Slab-Repository: ${{ github.repository }}" \
|
||||
-H "X-Slab-Command: start_aws" \
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @command.json \
|
||||
${{ secrets.SLAB_URL }}
|
||||
4
.github/workflows/sync_on_push.yml
vendored
4
.github/workflows/sync_on_push.yml
vendored
@@ -13,11 +13,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Save repo
|
||||
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: repo-archive
|
||||
path: '.'
|
||||
|
||||
40
.github/workflows/trigger_aws_tests_on_pr.yml
vendored
40
.github/workflows/trigger_aws_tests_on_pr.yml
vendored
@@ -3,16 +3,52 @@ name: PR AWS build trigger
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
pull_request_review:
|
||||
types: [submitted]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
trigger-tests:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: mshick/add-pr-comment@a65df5f64fc741e91c59b8359a4bc56e57aaf5b1
|
||||
- name: Get current labels
|
||||
uses: snnaplab/get-labels-action@f426df40304808ace3b5282d4f036515f7609576
|
||||
|
||||
- name: Remove approved label
|
||||
if: ${{ github.event_name == 'pull_request' && contains(fromJSON(env.LABELS), 'approved') }}
|
||||
uses: actions-ecosystem/action-remove-labels@2ce5d41b4b6aa8503e285553f75ed56e0a40bae0
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
labels: approved
|
||||
|
||||
- name: Launch fast tests
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
uses: mshick/add-pr-comment@a65df5f64fc741e91c59b8359a4bc56e57aaf5b1
|
||||
with:
|
||||
allow-repeats: true
|
||||
message: |
|
||||
@slab-ci cpu_fast_test
|
||||
|
||||
- name: Add approved label
|
||||
uses: actions-ecosystem/action-add-labels@18f1af5e3544586314bbe15c0273249c770b2daf
|
||||
if: ${{ github.event_name == 'pull_request_review' && github.event.review.state == 'approved' && !contains(fromJSON(env.LABELS), 'approved') }}
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
labels: approved
|
||||
|
||||
# PR label 'approved' presence is checked to avoid running the full test suite several times
|
||||
# in case of multiple approvals without new commits in between.
|
||||
- name: Launch full tests suite
|
||||
if: ${{ github.event_name == 'pull_request_review' && github.event.review.state == 'approved' && !contains(fromJSON(env.LABELS), 'approved') }}
|
||||
uses: mshick/add-pr-comment@a65df5f64fc741e91c59b8359a4bc56e57aaf5b1
|
||||
with:
|
||||
allow-repeats: true
|
||||
message: |
|
||||
Pull Request has been approved :tada:
|
||||
Launching full test suite...
|
||||
@slab-ci cpu_test
|
||||
@slab-ci cpu_integer_test
|
||||
@slab-ci cpu_multi_bit_test
|
||||
@slab-ci cpu_wasm_test
|
||||
@slab-ci csprng_randomness_testing
|
||||
|
||||
136
.github/workflows/wasm_client_benchmark.yml
vendored
Normal file
136
.github/workflows/wasm_client_benchmark.yml
vendored
Normal file
@@ -0,0 +1,136 @@
|
||||
# Run WASM client benchmarks on an AWS instance and return parsed results to Slab CI bot.
|
||||
name: WASM client benchmarks
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
instance_id:
|
||||
description: "Instance ID"
|
||||
type: string
|
||||
instance_image_id:
|
||||
description: "Instance AMI ID"
|
||||
type: string
|
||||
instance_type:
|
||||
description: "Instance product type"
|
||||
type: string
|
||||
runner_name:
|
||||
description: "Action runner name"
|
||||
type: string
|
||||
request_id:
|
||||
description: "Slab request ID"
|
||||
type: string
|
||||
# This input is not used in this workflow but still mandatory since a calling workflow could
|
||||
# use it. If a triggering command include a user_inputs field, then the triggered workflow
|
||||
# must include this very input, otherwise the workflow won't be called.
|
||||
# See start_full_benchmarks.yml as example.
|
||||
user_inputs:
|
||||
description: "Type of benchmarks to run"
|
||||
type: string
|
||||
default: "weekly_benchmarks"
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
|
||||
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
run-wasm-client-benchmarks:
|
||||
name: Execute WASM client benchmarks in EC2
|
||||
runs-on: ${{ github.event.inputs.runner_name }}
|
||||
if: ${{ !cancelled() }}
|
||||
steps:
|
||||
- name: Instance configuration used
|
||||
run: |
|
||||
echo "IDs: ${{ inputs.instance_id }}"
|
||||
echo "AMI: ${{ inputs.instance_image_id }}"
|
||||
echo "Type: ${{ inputs.instance_type }}"
|
||||
echo "Request ID: ${{ inputs.request_id }}"
|
||||
|
||||
- name: Get benchmark date
|
||||
run: |
|
||||
echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Checkout tfhe-rs repo with tags
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up home
|
||||
# "Install rust" step require root user to have a HOME directory which is not set.
|
||||
run: |
|
||||
echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
|
||||
|
||||
- name: Install rust
|
||||
uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Run benchmarks
|
||||
run: |
|
||||
make install_node
|
||||
make ci_bench_web_js_api_parallel
|
||||
|
||||
- name: Parse results
|
||||
run: |
|
||||
make parse_wasm_benchmarks
|
||||
|
||||
COMMIT_DATE="$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})"
|
||||
COMMIT_HASH="$(git describe --tags --dirty)"
|
||||
python3 ./ci/benchmark_parser.py tfhe/wasm_pk_gen.csv ${{ env.RESULTS_FILENAME }} \
|
||||
--database tfhe_rs \
|
||||
--hardware ${{ inputs.instance_type }} \
|
||||
--project-version "${COMMIT_HASH}" \
|
||||
--branch ${{ github.ref_name }} \
|
||||
--commit-date "${COMMIT_DATE}" \
|
||||
--bench-date "${{ env.BENCH_DATE }}" \
|
||||
--key-gen
|
||||
|
||||
- name: Measure public key and ciphertext sizes in HL Api
|
||||
run: |
|
||||
make measure_hlapi_compact_pk_ct_sizes
|
||||
|
||||
- name: Parse key and ciphertext sizes results
|
||||
run: |
|
||||
python3 ./ci/benchmark_parser.py tfhe/hlapi_cpk_and_cctl_sizes.csv ${{ env.RESULTS_FILENAME }} \
|
||||
--key-gen \
|
||||
--append-results
|
||||
|
||||
- name: Upload parsed results artifact
|
||||
uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32
|
||||
with:
|
||||
name: ${{ github.sha }}_wasm
|
||||
path: ${{ env.RESULTS_FILENAME }}
|
||||
|
||||
- name: Checkout Slab repo
|
||||
uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
|
||||
with:
|
||||
repository: zama-ai/slab
|
||||
path: slab
|
||||
token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
|
||||
|
||||
- name: Send data to Slab
|
||||
shell: bash
|
||||
run: |
|
||||
echo "Computing HMac on results file"
|
||||
SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')"
|
||||
echo "Sending results to Slab..."
|
||||
curl -v -k \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "X-Slab-Repository: ${{ github.repository }}" \
|
||||
-H "X-Slab-Command: store_data_v2" \
|
||||
-H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
|
||||
-d @${{ env.RESULTS_FILENAME }} \
|
||||
${{ secrets.SLAB_URL }}
|
||||
|
||||
- name: Slack Notification
|
||||
if: ${{ failure() }}
|
||||
continue-on-error: true
|
||||
uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8
|
||||
env:
|
||||
SLACK_COLOR: ${{ job.status }}
|
||||
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
|
||||
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
|
||||
SLACK_MESSAGE: "WASM benchmarks failed. (${{ env.ACTION_RUN_URL }})"
|
||||
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -12,3 +12,10 @@ target/
|
||||
|
||||
# Some of our bench outputs
|
||||
/tfhe/benchmarks_parameters
|
||||
**/*.csv
|
||||
|
||||
# dieharder run log
|
||||
dieharder_run.log
|
||||
|
||||
# Coverage reports
|
||||
./coverage/
|
||||
|
||||
14
.linelint.yml
Normal file
14
.linelint.yml
Normal file
@@ -0,0 +1,14 @@
|
||||
ignore:
|
||||
- .git
|
||||
- target
|
||||
- tfhe/benchmarks_parameters
|
||||
- tfhe/web_wasm_parallel_tests/node_modules
|
||||
- tfhe/web_wasm_parallel_tests/dist
|
||||
- keys
|
||||
- coverage
|
||||
|
||||
rules:
|
||||
# checks if file ends in a newline character
|
||||
end-of-file:
|
||||
enable: true
|
||||
single-new-line: true
|
||||
131
CODE_OF_CONDUCT.md
Normal file
131
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,131 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our pledge
|
||||
|
||||
We as members, contributors, and leaders pledge to make participation in our
|
||||
community a harassment-free experience for everyone, regardless of age, body
|
||||
size, visible or invisible disability, ethnicity, sex characteristics, gender
|
||||
identity and expression, level of experience, education, socio-economic status,
|
||||
nationality, personal appearance, race, caste, color, religion, or sexual
|
||||
identity and orientation.
|
||||
|
||||
We pledge to act and interact in ways that contribute to an open, welcoming,
|
||||
diverse, inclusive, and healthy community.
|
||||
|
||||
## Our standards
|
||||
|
||||
Examples of behavior that contributes to a positive environment for our
|
||||
community include:
|
||||
|
||||
- Demonstrating empathy and kindness toward other people
|
||||
- Being respectful of differing opinions, viewpoints, and experiences
|
||||
- Giving and gracefully accepting constructive feedback
|
||||
- Accepting responsibility and apologizing to those affected by our mistakes,
|
||||
and learning from the experience
|
||||
- Focusing on what is best not just for us as individuals, but for the overall
|
||||
community
|
||||
|
||||
Examples of unacceptable behavior include:
|
||||
|
||||
- The use of sexualized language or imagery, and sexual attention or advances of
|
||||
any kind
|
||||
- Trolling, insulting or derogatory comments, and personal or political attacks
|
||||
- Public or private harassment
|
||||
- Publishing others' private information, such as a physical or email address,
|
||||
without their explicit permission
|
||||
- Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Enforcement responsibilities
|
||||
|
||||
Community leaders are responsible for clarifying and enforcing our standards of
|
||||
acceptable behavior and will take appropriate and fair corrective action in
|
||||
response to any behavior that they deem inappropriate, threatening, offensive,
|
||||
or harmful.
|
||||
|
||||
Community leaders have the right and responsibility to remove, edit, or reject
|
||||
comments, commits, code, wiki edits, issues, and other contributions that are
|
||||
not aligned to this Code of Conduct, and will communicate reasons for moderation
|
||||
decisions when appropriate.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all community spaces, and also applies when
|
||||
an individual is officially representing the community in public spaces.
|
||||
Examples of representing our community include using an official e-mail address,
|
||||
posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting us anonymously through [this form](https://forms.gle/569j3cZqGRFgrR3u9).
|
||||
All complaints will be reviewed and investigated promptly and fairly.
|
||||
|
||||
All community leaders are obligated to respect the privacy and security of the
|
||||
reporter of any incident.
|
||||
|
||||
## Enforcement guidelines
|
||||
|
||||
Community leaders will follow these Community Impact Guidelines in determining
|
||||
the consequences for any action they deem in violation of this Code of Conduct:
|
||||
|
||||
### 1. Correction
|
||||
|
||||
**Community Impact**: Use of inappropriate language or other behavior deemed
|
||||
unprofessional or unwelcome in the community.
|
||||
|
||||
**Consequence**: A private, written warning from community leaders, providing
|
||||
clarity around the nature of the violation and an explanation of why the
|
||||
behavior was inappropriate. A public apology may be requested.
|
||||
|
||||
### 2. Warning
|
||||
|
||||
**Community Impact**: A violation through a single incident or series of
|
||||
actions.
|
||||
|
||||
**Consequence**: A warning with consequences for continued behavior. No
|
||||
interaction with the people involved, including unsolicited interaction with
|
||||
those enforcing the Code of Conduct, for a specified period of time. This
|
||||
includes avoiding interactions in community spaces as well as external channels
|
||||
like social media. Violating these terms may lead to a temporary or permanent
|
||||
ban.
|
||||
|
||||
### 3. Temporary ban
|
||||
|
||||
**Community Impact**: A serious violation of community standards, including
|
||||
sustained inappropriate behavior.
|
||||
|
||||
**Consequence**: A temporary ban from any sort of interaction or public
|
||||
communication with the community for a specified period of time. No public or
|
||||
private interaction with the people involved, including unsolicited interaction
|
||||
with those enforcing the Code of Conduct, is allowed during this period.
|
||||
Violating these terms may lead to a permanent ban.
|
||||
|
||||
### 4. Permanent ban
|
||||
|
||||
**Community Impact**: Demonstrating a pattern of violation of community
|
||||
standards, including sustained inappropriate behavior, harassment of an
|
||||
individual, or aggression toward or disparagement of classes of individuals.
|
||||
|
||||
**Consequence**: A permanent ban from any sort of public interaction within the
|
||||
community.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
|
||||
version 2.1, available at
|
||||
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
|
||||
|
||||
Community Impact Guidelines were inspired by
|
||||
[Mozilla's code of conduct enforcement ladder][mozilla coc].
|
||||
|
||||
For answers to common questions about this code of conduct, see the FAQ at
|
||||
[https://www.contributor-covenant.org/faq][faq]. Translations are available at
|
||||
[https://www.contributor-covenant.org/translations][translations].
|
||||
|
||||
[faq]: https://www.contributor-covenant.org/faq
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
[mozilla coc]: https://github.com/mozilla/diversity
|
||||
[translations]: https://www.contributor-covenant.org/translations
|
||||
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
|
||||
@@ -1,6 +1,6 @@
|
||||
[workspace]
|
||||
resolver = "2"
|
||||
members = ["tfhe", "tasks"]
|
||||
members = ["tfhe", "tasks", "apps/trivium", "concrete-csprng"]
|
||||
|
||||
[profile.bench]
|
||||
lto = "fat"
|
||||
@@ -8,6 +8,10 @@ lto = "fat"
|
||||
[profile.release]
|
||||
lto = "fat"
|
||||
|
||||
[profile.release_lto_off]
|
||||
inherits = "release"
|
||||
lto = "off"
|
||||
|
||||
# Compiles much faster for tests and allows reasonable performance for iterating
|
||||
[profile.devo]
|
||||
inherits = "dev"
|
||||
|
||||
381
Makefile
381
Makefile
@@ -3,14 +3,19 @@ OS:=$(shell uname)
|
||||
RS_CHECK_TOOLCHAIN:=$(shell cat toolchain.txt | tr -d '\n')
|
||||
CARGO_RS_CHECK_TOOLCHAIN:=+$(RS_CHECK_TOOLCHAIN)
|
||||
TARGET_ARCH_FEATURE:=$(shell ./scripts/get_arch_feature.sh)
|
||||
RS_BUILD_TOOLCHAIN:=$(shell \
|
||||
( (echo $(TARGET_ARCH_FEATURE) | grep -q x86) && echo stable) || echo $(RS_CHECK_TOOLCHAIN))
|
||||
RS_BUILD_TOOLCHAIN:=stable
|
||||
CARGO_RS_BUILD_TOOLCHAIN:=+$(RS_BUILD_TOOLCHAIN)
|
||||
CARGO_PROFILE?=release
|
||||
MIN_RUST_VERSION:=1.65
|
||||
MIN_RUST_VERSION:=$(shell grep rust-version tfhe/Cargo.toml | cut -d '=' -f 2 | xargs)
|
||||
AVX512_SUPPORT?=OFF
|
||||
WASM_RUSTFLAGS:=
|
||||
BIG_TESTS_INSTANCE?=FALSE
|
||||
GEN_KEY_CACHE_MULTI_BIT_ONLY?=FALSE
|
||||
GEN_KEY_CACHE_COVERAGE_ONLY?=FALSE
|
||||
PARSE_INTEGER_BENCH_CSV_FILE?=tfhe_rs_integer_benches.csv
|
||||
FAST_TESTS?=FALSE
|
||||
FAST_BENCH?=FALSE
|
||||
BENCH_OP_FLAVOR?=DEFAULT
|
||||
# This is done to avoid forgetting it, we still precise the RUSTFLAGS in the commands to be able to
|
||||
# copy paste the command in the terminal and change them if required without forgetting the flags
|
||||
export RUSTFLAGS?=-C target-cpu=native
|
||||
@@ -21,6 +26,38 @@ else
|
||||
AVX512_FEATURE=
|
||||
endif
|
||||
|
||||
ifeq ($(GEN_KEY_CACHE_MULTI_BIT_ONLY),TRUE)
|
||||
MULTI_BIT_ONLY=--multi-bit-only
|
||||
else
|
||||
MULTI_BIT_ONLY=
|
||||
endif
|
||||
|
||||
ifeq ($(GEN_KEY_CACHE_COVERAGE_ONLY),TRUE)
|
||||
COVERAGE_ONLY=--coverage-only
|
||||
else
|
||||
COVERAGE_ONLY=
|
||||
endif
|
||||
|
||||
# Variables used only for regex_engine example
|
||||
REGEX_STRING?=''
|
||||
REGEX_PATTERN?=''
|
||||
|
||||
# Exclude these files from coverage reports
|
||||
define COVERAGE_EXCLUDED_FILES
|
||||
--exclude-files apps/trivium/src/trivium/* \
|
||||
--exclude-files apps/trivium/src/kreyvium/* \
|
||||
--exclude-files apps/trivium/src/static_deque/* \
|
||||
--exclude-files apps/trivium/src/trans_ciphering/* \
|
||||
--exclude-files tasks/src/* \
|
||||
--exclude-files tfhe/benches/boolean/* \
|
||||
--exclude-files tfhe/benches/core_crypto/* \
|
||||
--exclude-files tfhe/benches/shortint/* \
|
||||
--exclude-files tfhe/benches/integer/* \
|
||||
--exclude-files tfhe/benches/* \
|
||||
--exclude-files tfhe/examples/regex_engine/* \
|
||||
--exclude-files tfhe/examples/utilities/*
|
||||
endef
|
||||
|
||||
.PHONY: rs_check_toolchain # Echo the rust toolchain used for checks
|
||||
rs_check_toolchain:
|
||||
@echo $(RS_CHECK_TOOLCHAIN)
|
||||
@@ -52,14 +89,55 @@ install_cargo_nextest: install_rs_build_toolchain
|
||||
cargo $(CARGO_RS_BUILD_TOOLCHAIN) install cargo-nextest --locked || \
|
||||
( echo "Unable to install cargo nextest, unknown error." && exit 1 )
|
||||
|
||||
.PHONY: install_wasm_pack # Install wasm-pack to build JS packages
|
||||
install_wasm_pack: install_rs_build_toolchain
|
||||
@wasm-pack --version > /dev/null 2>&1 || \
|
||||
cargo $(CARGO_RS_BUILD_TOOLCHAIN) install wasm-pack || \
|
||||
( echo "Unable to install cargo wasm-pack, unknown error." && exit 1 )
|
||||
|
||||
.PHONY: install_node # Install last version of NodeJS via nvm
|
||||
install_node:
|
||||
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | $(SHELL)
|
||||
source ~/.bashrc
|
||||
$(SHELL) -i -c 'nvm install node' || \
|
||||
( echo "Unable to install node, unknown error." && exit 1 )
|
||||
|
||||
.PHONY: install_dieharder # Install dieharder for apt distributions or macOS
|
||||
install_dieharder:
|
||||
@dieharder -h > /dev/null 2>&1 || \
|
||||
if [[ "$(OS)" == "Linux" ]]; then \
|
||||
sudo apt update && sudo apt install -y dieharder; \
|
||||
elif [[ "$(OS)" == "Darwin" ]]; then\
|
||||
brew install dieharder; \
|
||||
fi || ( echo "Unable to install dieharder, unknown error." && exit 1 )
|
||||
|
||||
.PHONY: install_tarpaulin # Install tarpaulin to perform code coverage
|
||||
install_tarpaulin: install_rs_build_toolchain
|
||||
@cargo tarpaulin --version > /dev/null 2>&1 || \
|
||||
cargo $(CARGO_RS_BUILD_TOOLCHAIN) install cargo-tarpaulin --locked || \
|
||||
( echo "Unable to install cargo tarpaulin, unknown error." && exit 1 )
|
||||
|
||||
.PHONY: check_linelint_installed # Check if linelint newline linter is installed
|
||||
check_linelint_installed:
|
||||
@printf "\n" | linelint - > /dev/null 2>&1 || \
|
||||
( echo "Unable to locate linelint. Try installing it: https://github.com/fernandrone/linelint/releases" && exit 1 )
|
||||
|
||||
.PHONY: fmt # Format rust code
|
||||
fmt: install_rs_check_toolchain
|
||||
cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" fmt
|
||||
|
||||
.PHONT: check_fmt # Check rust code format
|
||||
.PHONY: check_fmt # Check rust code format
|
||||
check_fmt: install_rs_check_toolchain
|
||||
cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" fmt --check
|
||||
|
||||
.PHONY: fix_newline # Fix newline at end of file issues to be UNIX compliant
|
||||
fix_newline: check_linelint_installed
|
||||
linelint -a .
|
||||
|
||||
.PHONY: check_newline # Check for newline at end of file to be UNIX compliant
|
||||
check_newline: check_linelint_installed
|
||||
linelint .
|
||||
|
||||
.PHONY: clippy_core # Run clippy lints on core_crypto with and without experimental features
|
||||
clippy_core: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" clippy \
|
||||
@@ -99,10 +177,10 @@ clippy_c_api: install_rs_check_toolchain
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean-c-api,shortint-c-api \
|
||||
-p tfhe -- --no-deps -D warnings
|
||||
|
||||
.PHONY: clippy_js_wasm_api # Run clippy lints enabling the boolean, shortint and the js wasm API
|
||||
.PHONY: clippy_js_wasm_api # Run clippy lints enabling the boolean, shortint, integer and the js wasm API
|
||||
clippy_js_wasm_api: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" clippy \
|
||||
--features=boolean-client-js-wasm-api,shortint-client-js-wasm-api \
|
||||
--features=boolean-client-js-wasm-api,shortint-client-js-wasm-api,integer-client-js-wasm-api \
|
||||
-p tfhe -- --no-deps -D warnings
|
||||
|
||||
.PHONY: clippy_tasks # Run clippy lints on helper tasks crate.
|
||||
@@ -110,24 +188,38 @@ clippy_tasks:
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" clippy \
|
||||
-p tasks -- --no-deps -D warnings
|
||||
|
||||
.PHONY: clippy_trivium # Run clippy lints on Trivium app
|
||||
clippy_trivium: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" clippy -p tfhe-trivium \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer \
|
||||
-p tfhe -- --no-deps -D warnings
|
||||
|
||||
.PHONY: clippy_all_targets # Run clippy lints on all targets (benches, examples, etc.)
|
||||
clippy_all_targets:
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" clippy --all-targets \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer,internal-keycache \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer,internal-keycache,safe-deserialization \
|
||||
-p tfhe -- --no-deps -D warnings
|
||||
|
||||
.PHONY: clippy_concrete_csprng # Run clippy lints on concrete-csprng
|
||||
clippy_concrete_csprng:
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" clippy --all-targets \
|
||||
--features=$(TARGET_ARCH_FEATURE) \
|
||||
-p concrete-csprng -- --no-deps -D warnings
|
||||
|
||||
.PHONY: clippy_all # Run all clippy targets
|
||||
clippy_all: clippy clippy_boolean clippy_shortint clippy_integer clippy_all_targets clippy_c_api \
|
||||
clippy_js_wasm_api clippy_tasks clippy_core
|
||||
clippy_js_wasm_api clippy_tasks clippy_core clippy_concrete_csprng clippy_trivium
|
||||
|
||||
.PHONY: clippy_fast # Run main clippy targets
|
||||
clippy_fast: clippy clippy_all_targets clippy_c_api clippy_js_wasm_api clippy_tasks clippy_core
|
||||
clippy_fast: clippy clippy_all_targets clippy_c_api clippy_js_wasm_api clippy_tasks clippy_core \
|
||||
clippy_concrete_csprng
|
||||
|
||||
.PHONY: gen_key_cache # Run the script to generate keys and cache them for shortint tests
|
||||
gen_key_cache: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example generates_test_keys \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache -p tfhe
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,internal-keycache -p tfhe -- \
|
||||
$(MULTI_BIT_ONLY) $(COVERAGE_ONLY)
|
||||
|
||||
.PHONY: build_core # Build core_crypto without experimental features
|
||||
build_core: install_rs_build_toolchain install_rs_check_toolchain
|
||||
@@ -167,25 +259,45 @@ build_tfhe_full: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) build --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer -p tfhe --all-targets
|
||||
|
||||
.PHONY: build_c_api # Build the C API for boolean and shortint
|
||||
.PHONY: build_c_api # Build the C API for boolean, shortint and integer
|
||||
build_c_api: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) build --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean-c-api,shortint-c-api,high-level-c-api \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean-c-api,shortint-c-api,high-level-c-api,safe-deserialization \
|
||||
-p tfhe
|
||||
|
||||
.PHONY: build_c_api_experimental_deterministic_fft # Build the C API for boolean, shortint and integer with experimental deterministic FFT
|
||||
build_c_api_experimental_deterministic_fft: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) build --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean-c-api,shortint-c-api,high-level-c-api,safe-deserialization,experimental-force_fft_algo_dif4 \
|
||||
-p tfhe
|
||||
|
||||
.PHONY: build_web_js_api # Build the js API targeting the web browser
|
||||
build_web_js_api: install_rs_build_toolchain
|
||||
build_web_js_api: install_rs_build_toolchain install_wasm_pack
|
||||
cd tfhe && \
|
||||
RUSTFLAGS="$(WASM_RUSTFLAGS)" rustup run "$(RS_BUILD_TOOLCHAIN)" \
|
||||
wasm-pack build --release --target=web \
|
||||
-- --features=boolean-client-js-wasm-api,shortint-client-js-wasm-api
|
||||
-- --features=boolean-client-js-wasm-api,shortint-client-js-wasm-api,integer-client-js-wasm-api
|
||||
|
||||
.PHONY: build_web_js_api_parallel # Build the js API targeting the web browser with parallelism support
|
||||
build_web_js_api_parallel: install_rs_check_toolchain install_wasm_pack
|
||||
cd tfhe && \
|
||||
rustup component add rust-src --toolchain $(RS_CHECK_TOOLCHAIN) && \
|
||||
RUSTFLAGS="$(WASM_RUSTFLAGS) -C target-feature=+atomics,+bulk-memory,+mutable-globals" rustup run $(RS_CHECK_TOOLCHAIN) \
|
||||
wasm-pack build --release --target=web \
|
||||
-- --features=boolean-client-js-wasm-api,shortint-client-js-wasm-api,integer-client-js-wasm-api,parallel-wasm-api \
|
||||
-Z build-std=panic_abort,std
|
||||
|
||||
.PHONY: build_node_js_api # Build the js API targeting nodejs
|
||||
build_node_js_api: install_rs_build_toolchain
|
||||
build_node_js_api: install_rs_build_toolchain install_wasm_pack
|
||||
cd tfhe && \
|
||||
RUSTFLAGS="$(WASM_RUSTFLAGS)" rustup run "$(RS_BUILD_TOOLCHAIN)" \
|
||||
wasm-pack build --release --target=nodejs \
|
||||
-- --features=boolean-client-js-wasm-api,shortint-client-js-wasm-api
|
||||
-- --features=boolean-client-js-wasm-api,shortint-client-js-wasm-api,integer-client-js-wasm-api
|
||||
|
||||
.PHONY: build_concrete_csprng # Build concrete_csprng
|
||||
build_concrete_csprng: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) build --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE) -p concrete-csprng --all-targets
|
||||
|
||||
.PHONY: test_core_crypto # Run the tests of the core_crypto module including experimental ones
|
||||
test_core_crypto: install_rs_build_toolchain install_rs_check_toolchain
|
||||
@@ -201,24 +313,73 @@ test_boolean: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean -p tfhe -- boolean::
|
||||
|
||||
.PHONY: test_c_api # Run the tests for the C API
|
||||
test_c_api: build_c_api
|
||||
.PHONY: test_boolean_cov # Run the tests of the boolean module with code coverage
|
||||
test_boolean_cov: install_rs_check_toolchain install_tarpaulin
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) tarpaulin --profile $(CARGO_PROFILE) \
|
||||
--out xml --output-dir coverage/boolean --line --engine llvm --timeout 500 \
|
||||
$(COVERAGE_EXCLUDED_FILES) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,internal-keycache,__coverage \
|
||||
-p tfhe -- boolean::
|
||||
|
||||
.PHONY: test_c_api_rs # Run the rust tests for the C API
|
||||
test_c_api_rs: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean-c-api,shortint-c-api,high-level-c-api,safe-deserialization \
|
||||
-p tfhe \
|
||||
c_api
|
||||
|
||||
.PHONY: test_c_api_c # Run the C tests for the C API
|
||||
test_c_api_c: build_c_api
|
||||
./scripts/c_api_tests.sh
|
||||
|
||||
.PHONY: test_c_api # Run all the tests for the C API
|
||||
test_c_api: test_c_api_rs test_c_api_c
|
||||
|
||||
.PHONY: test_shortint_ci # Run the tests for shortint ci
|
||||
test_shortint_ci: install_rs_build_toolchain install_cargo_nextest
|
||||
BIG_TESTS_INSTANCE="$(BIG_TESTS_INSTANCE)" \
|
||||
./scripts/shortint-tests.sh $(CARGO_RS_BUILD_TOOLCHAIN)
|
||||
FAST_TESTS="$(FAST_TESTS)" \
|
||||
./scripts/shortint-tests.sh --rust-toolchain $(CARGO_RS_BUILD_TOOLCHAIN) \
|
||||
--cargo-profile "$(CARGO_PROFILE)"
|
||||
|
||||
.PHONY: test_shortint_multi_bit_ci # Run the tests for shortint ci running only multibit tests
|
||||
test_shortint_multi_bit_ci: install_rs_build_toolchain install_cargo_nextest
|
||||
BIG_TESTS_INSTANCE="$(BIG_TESTS_INSTANCE)" \
|
||||
FAST_TESTS="$(FAST_TESTS)" \
|
||||
./scripts/shortint-tests.sh --rust-toolchain $(CARGO_RS_BUILD_TOOLCHAIN) \
|
||||
--cargo-profile "$(CARGO_PROFILE)" --multi-bit
|
||||
|
||||
.PHONY: test_shortint # Run all the tests for shortint
|
||||
test_shortint: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache -p tfhe -- shortint::
|
||||
|
||||
.PHONY: test_shortint_cov # Run the tests of the shortint module with code coverage
|
||||
test_shortint_cov: install_rs_check_toolchain install_tarpaulin
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) tarpaulin --profile $(CARGO_PROFILE) \
|
||||
--out xml --output-dir coverage/shortint --line --engine llvm --timeout 500 \
|
||||
$(COVERAGE_EXCLUDED_FILES) \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache,__coverage \
|
||||
-p tfhe -- shortint::
|
||||
|
||||
.PHONY: test_integer_ci # Run the tests for integer ci
|
||||
test_integer_ci: install_rs_build_toolchain install_cargo_nextest
|
||||
test_integer_ci: install_rs_check_toolchain install_cargo_nextest
|
||||
BIG_TESTS_INSTANCE="$(BIG_TESTS_INSTANCE)" \
|
||||
./scripts/integer-tests.sh $(CARGO_RS_BUILD_TOOLCHAIN)
|
||||
FAST_TESTS="$(FAST_TESTS)" \
|
||||
./scripts/integer-tests.sh --rust-toolchain $(CARGO_RS_CHECK_TOOLCHAIN) \
|
||||
--cargo-profile "$(CARGO_PROFILE)" --avx512-support "$(AVX512_SUPPORT)"
|
||||
|
||||
.PHONY: test_integer_multi_bit_ci # Run the tests for integer ci running only multibit tests
|
||||
test_integer_multi_bit_ci: install_rs_check_toolchain install_cargo_nextest
|
||||
BIG_TESTS_INSTANCE="$(BIG_TESTS_INSTANCE)" \
|
||||
FAST_TESTS="$(FAST_TESTS)" \
|
||||
./scripts/integer-tests.sh --rust-toolchain $(CARGO_RS_CHECK_TOOLCHAIN) \
|
||||
--cargo-profile "$(CARGO_PROFILE)" --multi-bit --avx512-support "$(AVX512_SUPPORT)"
|
||||
|
||||
.PHONY: test_safe_deserialization # Run the tests for safe deserialization
|
||||
test_safe_deserialization: install_rs_build_toolchain install_cargo_nextest
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer,internal-keycache,safe-deserialization -p tfhe -- safe_deserialization::
|
||||
|
||||
.PHONY: test_integer # Run all the tests for integer
|
||||
test_integer: install_rs_build_toolchain
|
||||
@@ -228,7 +389,8 @@ test_integer: install_rs_build_toolchain
|
||||
.PHONY: test_high_level_api # Run all the tests for high_level_api
|
||||
test_high_level_api: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer,internal-keycache -p tfhe -- high_level_api::
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer,internal-keycache -p tfhe \
|
||||
-- high_level_api::
|
||||
|
||||
.PHONY: test_user_doc # Run tests from the .md documentation
|
||||
test_user_doc: install_rs_build_toolchain
|
||||
@@ -236,12 +398,54 @@ test_user_doc: install_rs_build_toolchain
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer,internal-keycache -p tfhe \
|
||||
-- test_user_docs::
|
||||
|
||||
.PHONY: test_regex_engine # Run tests for regex_engine example
|
||||
test_regex_engine: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--example regex_engine \
|
||||
--features=$(TARGET_ARCH_FEATURE),integer
|
||||
|
||||
.PHONY: test_sha256_bool # Run tests for sha256_bool example
|
||||
test_sha256_bool: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--example sha256_bool \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean
|
||||
|
||||
.PHONY: test_examples # Run tests for examples
|
||||
test_examples: test_sha256_bool test_regex_engine
|
||||
|
||||
.PHONY: test_trivium # Run tests for trivium
|
||||
test_trivium: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
-p tfhe-trivium -- --test-threads=1 trivium::
|
||||
|
||||
.PHONY: test_kreyvium # Run tests for kreyvium
|
||||
test_kreyvium: install_rs_build_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
-p tfhe-trivium -- --test-threads=1 kreyvium::
|
||||
|
||||
.PHONY: test_concrete_csprng # Run concrete-csprng tests
|
||||
test_concrete_csprng:
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \
|
||||
--features=$(TARGET_ARCH_FEATURE) -p concrete-csprng
|
||||
|
||||
.PHONY: doc # Build rust doc
|
||||
doc: install_rs_check_toolchain
|
||||
RUSTDOCFLAGS="--html-in-header katex-header.html" \
|
||||
cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" doc \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer --no-deps
|
||||
|
||||
.PHONY: docs # Build rust doc alias for doc
|
||||
docs: doc
|
||||
|
||||
.PHONY: lint_doc # Build rust doc with linting enabled
|
||||
lint_doc: install_rs_check_toolchain
|
||||
RUSTDOCFLAGS="--html-in-header katex-header.html -Dwarnings" \
|
||||
cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" doc \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,integer --no-deps
|
||||
|
||||
.PHONY: lint_docs # Build rust doc with linting enabled alias for lint_doc
|
||||
lint_docs: lint_doc
|
||||
|
||||
.PHONY: format_doc_latex # Format the documentation latex equations to avoid broken rendering.
|
||||
format_doc_latex:
|
||||
cargo xtask format_latex_doc
|
||||
@@ -252,13 +456,15 @@ format_doc_latex:
|
||||
@printf "\n===============================\n"
|
||||
|
||||
.PHONY: check_compile_tests # Build tests in debug without running them
|
||||
check_compile_tests: build_c_api
|
||||
check_compile_tests:
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --no-run \
|
||||
--features=$(TARGET_ARCH_FEATURE),experimental,boolean,shortint,integer,internal-keycache \
|
||||
--features=$(TARGET_ARCH_FEATURE),experimental,boolean,shortint,integer,internal-keycache,safe-deserialization \
|
||||
-p tfhe
|
||||
@if [[ "$(OS)" == "Linux" || "$(OS)" == "Darwin" ]]; then \
|
||||
./scripts/c_api_tests.sh --build-only; \
|
||||
fi
|
||||
|
||||
@if [[ "$(OS)" == "Linux" || "$(OS)" == "Darwin" ]]; then \
|
||||
"$(MAKE)" build_c_api; \
|
||||
./scripts/c_api_tests.sh --build-only; \
|
||||
fi
|
||||
|
||||
.PHONY: build_nodejs_test_docker # Build a docker image with tools to run nodejs tests for wasm API
|
||||
build_nodejs_test_docker:
|
||||
@@ -280,22 +486,64 @@ test_nodejs_wasm_api_in_docker: build_nodejs_test_docker
|
||||
test_nodejs_wasm_api: build_node_js_api
|
||||
cd tfhe && node --test js_on_wasm_tests
|
||||
|
||||
.PHONY: test_web_js_api_parallel # Run tests for the web wasm api
|
||||
test_web_js_api_parallel: build_web_js_api_parallel
|
||||
$(MAKE) -C tfhe/web_wasm_parallel_tests test
|
||||
|
||||
.PHONY: ci_test_web_js_api_parallel # Run tests for the web wasm api
|
||||
ci_test_web_js_api_parallel: build_web_js_api_parallel
|
||||
source ~/.nvm/nvm.sh && \
|
||||
nvm install 20 && \
|
||||
nvm use 20 && \
|
||||
$(MAKE) -C tfhe/web_wasm_parallel_tests test-ci
|
||||
|
||||
.PHONY: no_tfhe_typo # Check we did not invert the h and f in tfhe
|
||||
no_tfhe_typo:
|
||||
@./scripts/no_tfhe_typo.sh
|
||||
|
||||
.PHONY: no_dbg_log # Check we did not leave dbg macro calls in the rust code
|
||||
no_dbg_log:
|
||||
@./scripts/no_dbg_calls.sh
|
||||
|
||||
.PHONY: dieharder_csprng # Run the dieharder test suite on our CSPRNG implementation
|
||||
dieharder_csprng: install_dieharder build_concrete_csprng
|
||||
./scripts/dieharder_test.sh
|
||||
|
||||
#
|
||||
# Benchmarks
|
||||
#
|
||||
|
||||
.PHONY: bench_integer # Run benchmarks for integer
|
||||
bench_integer: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
RUSTFLAGS="$(RUSTFLAGS)" __TFHE_RS_BENCH_OP_FLAVOR=$(BENCH_OP_FLAVOR) __TFHE_RS_FAST_BENCH=$(FAST_BENCH) \
|
||||
cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
--bench integer-bench \
|
||||
--features=$(TARGET_ARCH_FEATURE),integer,internal-keycache,$(AVX512_FEATURE) -p tfhe
|
||||
--features=$(TARGET_ARCH_FEATURE),integer,internal-keycache,$(AVX512_FEATURE) -p tfhe --
|
||||
|
||||
.PHONY: bench_integer_multi_bit # Run benchmarks for integer using multi-bit parameters
|
||||
bench_integer_multi_bit: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" __TFHE_RS_BENCH_TYPE=MULTI_BIT \
|
||||
__TFHE_RS_BENCH_OP_FLAVOR=$(BENCH_OP_FLAVOR) __TFHE_RS_FAST_BENCH=$(FAST_BENCH) \
|
||||
cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
--bench integer-bench \
|
||||
--features=$(TARGET_ARCH_FEATURE),integer,internal-keycache,$(AVX512_FEATURE) -p tfhe --
|
||||
|
||||
.PHONY: bench_shortint # Run benchmarks for shortint
|
||||
bench_shortint: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
RUSTFLAGS="$(RUSTFLAGS)" __TFHE_RS_BENCH_OP_FLAVOR=$(BENCH_OP_FLAVOR) \
|
||||
cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
--bench shortint-bench \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache,$(AVX512_FEATURE) -p tfhe
|
||||
|
||||
.PHONY: bench_shortint_multi_bit # Run benchmarks for shortint using multi-bit parameters
|
||||
bench_shortint_multi_bit: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" __TFHE_RS_BENCH_TYPE=MULTI_BIT \
|
||||
__TFHE_RS_BENCH_OP_FLAVOR=$(BENCH_OP_FLAVOR) \
|
||||
cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
--bench shortint-bench \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache,$(AVX512_FEATURE) -p tfhe --
|
||||
|
||||
|
||||
.PHONY: bench_boolean # Run benchmarks for boolean
|
||||
bench_boolean: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench \
|
||||
@@ -308,26 +556,89 @@ bench_pbs: install_rs_check_toolchain
|
||||
--bench pbs-bench \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,internal-keycache,$(AVX512_FEATURE) -p tfhe
|
||||
|
||||
.PHONY: bench_web_js_api_parallel # Run benchmarks for the web wasm api
|
||||
bench_web_js_api_parallel: build_web_js_api_parallel
|
||||
$(MAKE) -C tfhe/web_wasm_parallel_tests bench
|
||||
|
||||
.PHONY: ci_bench_web_js_api_parallel # Run benchmarks for the web wasm api
|
||||
ci_bench_web_js_api_parallel: build_web_js_api_parallel
|
||||
source ~/.nvm/nvm.sh && \
|
||||
nvm use node && \
|
||||
$(MAKE) -C tfhe/web_wasm_parallel_tests bench-ci
|
||||
|
||||
#
|
||||
# Utility tools
|
||||
#
|
||||
|
||||
.PHONY: measure_hlapi_compact_pk_ct_sizes # Measure sizes of public keys and ciphertext for high-level API
|
||||
measure_hlapi_compact_pk_ct_sizes: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example hlapi_compact_pk_ct_sizes \
|
||||
--features=$(TARGET_ARCH_FEATURE),integer,internal-keycache
|
||||
|
||||
.PHONY: measure_shortint_key_sizes # Measure sizes of bootstrapping and key switching keys for shortint
|
||||
measure_shortint_key_sizes: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run \
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example shortint_key_sizes \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache
|
||||
|
||||
.PHONY: measure_boolean_key_sizes # Measure sizes of bootstrapping and key switching keys for boolean
|
||||
measure_boolean_key_sizes: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run \
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example boolean_key_sizes \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,internal-keycache
|
||||
|
||||
.PHONY: parse_integer_benches # Run python parser to output a csv containing integer benches data
|
||||
parse_integer_benches:
|
||||
python3 ./ci/parse_integer_benches_to_csv.py \
|
||||
--criterion-dir target/criterion \
|
||||
--output-file "$(PARSE_INTEGER_BENCH_CSV_FILE)"
|
||||
|
||||
.PHONY: parse_wasm_benchmarks # Parse benchmarks performed with WASM web client into a CSV file
|
||||
parse_wasm_benchmarks: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example wasm_benchmarks_parser \
|
||||
--features=$(TARGET_ARCH_FEATURE),shortint,internal-keycache \
|
||||
-- web_wasm_parallel_tests/test/benchmark_results
|
||||
|
||||
.PHONY: write_params_to_file # Gather all crypto parameters into a file with a Sage readable format.
|
||||
write_params_to_file: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example write_params_to_file \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean,shortint,internal-keycache
|
||||
|
||||
#
|
||||
# Real use case examples
|
||||
#
|
||||
|
||||
.PHONY: regex_engine # Run regex_engine example
|
||||
regex_engine: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example regex_engine \
|
||||
--features=$(TARGET_ARCH_FEATURE),integer \
|
||||
-- $(REGEX_STRING) $(REGEX_PATTERN)
|
||||
|
||||
.PHONY: dark_market # Run dark market example
|
||||
dark_market: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example dark_market \
|
||||
--features=$(TARGET_ARCH_FEATURE),integer,internal-keycache \
|
||||
-- fhe-modified fhe-parallel plain fhe
|
||||
|
||||
.PHONY: sha256_bool # Run sha256_bool example
|
||||
sha256_bool: install_rs_check_toolchain
|
||||
RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_CHECK_TOOLCHAIN) run --profile $(CARGO_PROFILE) \
|
||||
--example sha256_bool \
|
||||
--features=$(TARGET_ARCH_FEATURE),boolean
|
||||
|
||||
.PHONY: pcc # pcc stands for pre commit checks
|
||||
pcc: no_tfhe_typo check_fmt doc clippy_all check_compile_tests
|
||||
pcc: no_tfhe_typo no_dbg_log check_fmt lint_doc clippy_all check_compile_tests
|
||||
|
||||
.PHONY: fpcc # pcc stands for pre commit checks, the f stands for fast
|
||||
fpcc: no_tfhe_typo check_fmt doc clippy_fast check_compile_tests
|
||||
fpcc: no_tfhe_typo no_dbg_log check_fmt lint_doc clippy_fast check_compile_tests
|
||||
|
||||
.PHONY: conformance # Automatically fix problems that can be fixed
|
||||
conformance: fmt
|
||||
conformance: fix_newline fmt
|
||||
|
||||
.PHONY: help # Generate list of targets with descriptions
|
||||
help:
|
||||
|
||||
165
README.md
165
README.md
@@ -31,7 +31,9 @@ implementation. The goal is to have a stable, simple, high-performance, and
|
||||
production-ready library for all the advanced features of TFHE.
|
||||
|
||||
## Getting Started
|
||||
The steps to run a first example are described below.
|
||||
|
||||
### Cargo.toml configuration
|
||||
To use the latest version of `TFHE-rs` in your project, you first need to add it as a dependency in your `Cargo.toml`:
|
||||
|
||||
+ For x86_64-based machines running Unix-like OSes:
|
||||
@@ -45,7 +47,7 @@ tfhe = { version = "*", features = ["boolean", "shortint", "integer", "x86_64-un
|
||||
```toml
|
||||
tfhe = { version = "*", features = ["boolean", "shortint", "integer", "aarch64-unix"] }
|
||||
```
|
||||
Note: users with ARM devices must use `TFHE-rs` by compiling using the `nightly` toolchain.
|
||||
Note: users with ARM devices must compile `TFHE-rs` using a stable toolchain with version >= 1.72.
|
||||
|
||||
|
||||
+ For x86_64-based machines with the [`rdseed instruction`](https://en.wikipedia.org/wiki/RDRAND)
|
||||
@@ -57,95 +59,69 @@ tfhe = { version = "*", features = ["boolean", "shortint", "integer", "x86_64"]
|
||||
|
||||
Note: aarch64-based machines are not yet supported for Windows as it's currently missing an entropy source to be able to seed the [CSPRNGs](https://en.wikipedia.org/wiki/Cryptographically_secure_pseudorandom_number_generator) used in TFHE-rs
|
||||
|
||||
|
||||
## A simple example
|
||||
|
||||
Here is a full example:
|
||||
|
||||
``` rust
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, set_server_key, ConfigBuilder, FheUint32, FheUint8};
|
||||
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Basic configuration to use homomorphic integers
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
|
||||
// Key generation
|
||||
let (client_key, server_keys) = generate_keys(config);
|
||||
|
||||
let clear_a = 1344u32;
|
||||
let clear_b = 5u32;
|
||||
let clear_c = 7u8;
|
||||
|
||||
// Encrypting the input data using the (private) client_key
|
||||
// FheUint32: Encrypted equivalent to u32
|
||||
let mut encrypted_a = FheUint32::try_encrypt(clear_a, &client_key)?;
|
||||
let encrypted_b = FheUint32::try_encrypt(clear_b, &client_key)?;
|
||||
|
||||
// FheUint8: Encrypted equivalent to u8
|
||||
let encrypted_c = FheUint8::try_encrypt(clear_c, &client_key)?;
|
||||
|
||||
// On the server side:
|
||||
set_server_key(server_keys);
|
||||
|
||||
// Clear equivalent computations: 1344 * 5 = 6720
|
||||
let encrypted_res_mul = &encrypted_a * &encrypted_b;
|
||||
|
||||
// Clear equivalent computations: 1344 >> 5 = 42
|
||||
encrypted_a = &encrypted_res_mul >> &encrypted_b;
|
||||
|
||||
// Clear equivalent computations: let casted_a = a as u8;
|
||||
let casted_a: FheUint8 = encrypted_a.cast_into();
|
||||
|
||||
// Clear equivalent computations: min(42, 7) = 7
|
||||
let encrypted_res_min = &casted_a.min(&encrypted_c);
|
||||
|
||||
// Operation between clear and encrypted data:
|
||||
// Clear equivalent computations: 7 & 1 = 1
|
||||
let encrypted_res = encrypted_res_min & 1_u8;
|
||||
|
||||
// Decrypting on the client side:
|
||||
let clear_res: u8 = encrypted_res.decrypt(&client_key);
|
||||
assert_eq!(clear_res, 1_u8);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
To run this code, use the following command:
|
||||
<p align="center"> <code> cargo run --release </code> </p>
|
||||
|
||||
Note that when running code that uses `tfhe-rs`, it is highly recommended
|
||||
to run in release mode with cargo`s `--release` flag to have the best performances possible,
|
||||
eg: `cargo run --release`.
|
||||
to run in release mode with cargo's `--release` flag to have the best performances possible,
|
||||
|
||||
Here is a full example evaluating a Boolean circuit:
|
||||
|
||||
```rust
|
||||
use tfhe::boolean::prelude::*;
|
||||
|
||||
fn main() {
|
||||
// We generate a set of client/server keys, using the default parameters:
|
||||
let (mut client_key, mut server_key) = gen_keys();
|
||||
|
||||
// We use the client secret key to encrypt two messages:
|
||||
let ct_1 = client_key.encrypt(true);
|
||||
let ct_2 = client_key.encrypt(false);
|
||||
|
||||
// We use the server public key to execute a boolean circuit:
|
||||
// if ((NOT ct_2) NAND (ct_1 AND ct_2)) then (NOT ct_2) else (ct_1 AND ct_2)
|
||||
let ct_3 = server_key.not(&ct_2);
|
||||
let ct_4 = server_key.and(&ct_1, &ct_2);
|
||||
let ct_5 = server_key.nand(&ct_3, &ct_4);
|
||||
let ct_6 = server_key.mux(&ct_5, &ct_3, &ct_4);
|
||||
|
||||
// We use the client key to decrypt the output of the circuit:
|
||||
let output = client_key.decrypt(&ct_6);
|
||||
assert_eq!(output, true);
|
||||
}
|
||||
```
|
||||
|
||||
Another example of how the library can be used with shortints:
|
||||
|
||||
```rust
|
||||
use tfhe::shortint::prelude::*;
|
||||
|
||||
fn main() {
|
||||
// Generate a set of client/server keys
|
||||
// with 2 bits of message and 2 bits of carry
|
||||
let (client_key, server_key) = gen_keys(PARAM_MESSAGE_2_CARRY_2);
|
||||
|
||||
let msg1 = 3;
|
||||
let msg2 = 2;
|
||||
|
||||
// Encrypt two messages using the (private) client key:
|
||||
let ct_1 = client_key.encrypt(msg1);
|
||||
let ct_2 = client_key.encrypt(msg2);
|
||||
|
||||
// Homomorphically compute an addition
|
||||
let ct_add = server_key.unchecked_add(&ct_1, &ct_2);
|
||||
|
||||
// Define the Hamming weight function
|
||||
// f: x -> sum of the bits of x
|
||||
let f = |x:u64| x.count_ones() as u64;
|
||||
|
||||
// Generate the accumulator for the function
|
||||
let acc = server_key.generate_accumulator(f);
|
||||
|
||||
// Compute the function over the ciphertext using the PBS
|
||||
let ct_res = server_key.apply_lookup_table(&ct_add, &acc);
|
||||
|
||||
// Decrypt the ciphertext using the (private) client key
|
||||
let output = client_key.decrypt(&ct_res);
|
||||
assert_eq!(output, f(msg1 + msg2));
|
||||
}
|
||||
```
|
||||
|
||||
An example using integer:
|
||||
|
||||
```rust
|
||||
use tfhe::integer::gen_keys_radix;
|
||||
use tfhe::shortint::parameters::PARAM_MESSAGE_2_CARRY_2;
|
||||
|
||||
fn main() {
|
||||
// We create keys to create 16 bits integers
|
||||
// using 8 blocks of 2 bits
|
||||
let (cks, sks) = gen_keys_radix(&PARAM_MESSAGE_2_CARRY_2, 8);
|
||||
|
||||
let clear_a = 2382u16;
|
||||
let clear_b = 29374u16;
|
||||
|
||||
let mut a = cks.encrypt(clear_a as u64);
|
||||
let mut b = cks.encrypt(clear_b as u64);
|
||||
|
||||
let encrypted_max = sks.smart_max_parallelized(&mut a, &mut b);
|
||||
let decrypted_max: u64 = cks.decrypt(&encrypted_max);
|
||||
|
||||
assert_eq!(decrypted_max as u16, clear_a.max(clear_b))
|
||||
}
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
@@ -167,6 +143,19 @@ libraries.
|
||||
<img src="https://user-images.githubusercontent.com/5758427/231115030-21195b55-2629-4c01-9809-be5059243999.png">
|
||||
</a>
|
||||
|
||||
## Citing TFHE-rs
|
||||
|
||||
To cite TFHE-rs in academic papers, please use the following entry:
|
||||
|
||||
```text
|
||||
@Misc{TFHE-rs,
|
||||
title={{TFHE-rs: A Pure Rust Implementation of the TFHE Scheme for Boolean and Integer Arithmetics Over Encrypted Data}},
|
||||
author={Zama},
|
||||
year={2022},
|
||||
note={\url{https://github.com/zama-ai/tfhe-rs}},
|
||||
}
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This software is distributed under the BSD-3-Clause-Clear license. If you have any questions,
|
||||
|
||||
24
apps/trivium/Cargo.toml
Normal file
24
apps/trivium/Cargo.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "tfhe-trivium"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
rayon = { version = "1.7.0"}
|
||||
|
||||
[target.'cfg(target_arch = "x86_64")'.dependencies.tfhe]
|
||||
path = "../../tfhe"
|
||||
features = [ "boolean", "shortint", "integer", "x86_64" ]
|
||||
|
||||
[target.'cfg(target_arch = "aarch64")'.dependencies.tfhe]
|
||||
path = "../../tfhe"
|
||||
features = [ "boolean", "shortint", "integer", "aarch64-unix" ]
|
||||
|
||||
[dev-dependencies]
|
||||
criterion = { version = "0.5.1", features = [ "html_reports" ]}
|
||||
|
||||
[[bench]]
|
||||
name = "trivium"
|
||||
harness = false
|
||||
204
apps/trivium/README.md
Normal file
204
apps/trivium/README.md
Normal file
@@ -0,0 +1,204 @@
|
||||
# FHE boolean Trivium implementation using TFHE-rs
|
||||
|
||||
The cleartext boolean Trivium is available to be built using the function `TriviumStream::<bool>::new`.
|
||||
This takes as input 2 arrays of 80 bool: the Trivium key and the IV. After initialization, it returns a TriviumStream on
|
||||
which the user can call `next`, getting the next bit of the cipher stream, or `next_64`, which will compute 64 values at once,
|
||||
using multithreading to accelerate the computation.
|
||||
|
||||
|
||||
Quite similarly, the function `TriviumStream::<FheBool>::new` will return a very similar object running in FHE space. Its arguments are
|
||||
2 arrays of 80 FheBool representing the encrypted Trivium key, and the encrypted IV. It also requires a reference to the the server key of the
|
||||
current scheme. This means that any user of this feature must also have the `tfhe-rs` crate as a dependency.
|
||||
|
||||
|
||||
Example of a Rust main below:
|
||||
```rust
|
||||
use tfhe::{ConfigBuilder, generate_keys, FheBool};
|
||||
use tfhe::prelude::*;
|
||||
|
||||
use tfhe_trivium::TriviumStream;
|
||||
|
||||
fn get_hexadecimal_string_from_lsb_first_stream(a: Vec<bool>) -> String {
|
||||
assert!(a.len() % 8 == 0);
|
||||
let mut hexadecimal: String = "".to_string();
|
||||
for test in a.chunks(8) {
|
||||
// Encoding is bytes in LSB order
|
||||
match test[4..8] {
|
||||
[false, false, false, false] => hexadecimal.push('0'),
|
||||
[true, false, false, false] => hexadecimal.push('1'),
|
||||
[false, true, false, false] => hexadecimal.push('2'),
|
||||
[true, true, false, false] => hexadecimal.push('3'),
|
||||
|
||||
[false, false, true, false] => hexadecimal.push('4'),
|
||||
[true, false, true, false] => hexadecimal.push('5'),
|
||||
[false, true, true, false] => hexadecimal.push('6'),
|
||||
[true, true, true, false] => hexadecimal.push('7'),
|
||||
|
||||
[false, false, false, true] => hexadecimal.push('8'),
|
||||
[true, false, false, true] => hexadecimal.push('9'),
|
||||
[false, true, false, true] => hexadecimal.push('A'),
|
||||
[true, true, false, true] => hexadecimal.push('B'),
|
||||
|
||||
[false, false, true, true] => hexadecimal.push('C'),
|
||||
[true, false, true, true] => hexadecimal.push('D'),
|
||||
[false, true, true, true] => hexadecimal.push('E'),
|
||||
[true, true, true, true] => hexadecimal.push('F'),
|
||||
_ => ()
|
||||
};
|
||||
match test[0..4] {
|
||||
[false, false, false, false] => hexadecimal.push('0'),
|
||||
[true, false, false, false] => hexadecimal.push('1'),
|
||||
[false, true, false, false] => hexadecimal.push('2'),
|
||||
[true, true, false, false] => hexadecimal.push('3'),
|
||||
|
||||
[false, false, true, false] => hexadecimal.push('4'),
|
||||
[true, false, true, false] => hexadecimal.push('5'),
|
||||
[false, true, true, false] => hexadecimal.push('6'),
|
||||
[true, true, true, false] => hexadecimal.push('7'),
|
||||
|
||||
[false, false, false, true] => hexadecimal.push('8'),
|
||||
[true, false, false, true] => hexadecimal.push('9'),
|
||||
[false, true, false, true] => hexadecimal.push('A'),
|
||||
[true, true, false, true] => hexadecimal.push('B'),
|
||||
|
||||
[false, false, true, true] => hexadecimal.push('C'),
|
||||
[true, false, true, true] => hexadecimal.push('D'),
|
||||
[false, true, true, true] => hexadecimal.push('E'),
|
||||
[true, true, true, true] => hexadecimal.push('F'),
|
||||
_ => ()
|
||||
};
|
||||
}
|
||||
return hexadecimal;
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let config = ConfigBuilder::all_disabled().enable_default_bool().build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [false; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&key_string[i..i+2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8*(i>>1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [false; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&iv_string[i..i+2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8*(i>>1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();
|
||||
|
||||
let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));
|
||||
let cipher_iv = iv.map(|x| FheBool::encrypt(x, &client_key));
|
||||
|
||||
|
||||
let mut trivium = TriviumStream::<FheBool>::new(cipher_key, cipher_iv, &server_key);
|
||||
|
||||
let mut vec = Vec::<bool>::with_capacity(64*8);
|
||||
while vec.len() < 64*8 {
|
||||
let cipher_outputs = trivium.next_64();
|
||||
for c in cipher_outputs {
|
||||
vec.push(c.decrypt(&client_key))
|
||||
}
|
||||
}
|
||||
|
||||
let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
|
||||
assert_eq!(output_0_63, hexadecimal[0..64*2]);
|
||||
}
|
||||
```
|
||||
|
||||
# FHE byte Trivium implementation
|
||||
|
||||
The same objects have also been implemented to stream bytes instead of booleans. They can be constructed and used in the same way via the functions `TriviumStreamByte::<u8>::new` and
|
||||
`TriviumStreamByte::<FheUint8>::new` with the same arguments as before. The `FheUint8` version is significantly slower than the `FheBool` version, because not running
|
||||
with the same cryptographic parameters. Its interest lie in its trans-ciphering capabilities: `TriviumStreamByte<FheUint8>` implements the trait `TransCiphering`,
|
||||
meaning it implements the functions `trans_encrypt_64`. This function takes as input a `FheUint64` and outputs a `FheUint64`, the output being
|
||||
encrypted via tfhe and trivium. For convenience we also provide `trans_decrypt_64`, but this is of course the exact same function.
|
||||
|
||||
Other sizes than 64 bit are expected to be available in the future.
|
||||
|
||||
# FHE shortint Trivium implementation
|
||||
|
||||
The same implementation is also available for generic Ciphertexts representing bits (meant to be used with parameters `PARAM_MESSAGE_1_CARRY_1_KS_PBS`). It uses a lower level API
|
||||
of tfhe-rs, so the syntax is a little bit different. It also implements the `TransCiphering` trait. For optimization purposes, it does not internally run on the same
|
||||
cryptographic parameters as the high level API of tfhe-rs. As such, it requires the usage of a casting key, to switch from one parameter space to another, which makes
|
||||
its setup a little more intricate.
|
||||
|
||||
Example code:
|
||||
```rust
|
||||
use tfhe::shortint::prelude::*;
|
||||
use tfhe::shortint::CastingKey;
|
||||
|
||||
use tfhe::{ConfigBuilder, generate_keys, FheUint64};
|
||||
use tfhe::prelude::*;
|
||||
|
||||
use tfhe_trivium::TriviumStreamShortint;
|
||||
|
||||
fn test_shortint() {
|
||||
let config = ConfigBuilder::all_disabled().enable_default_integers().build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
let ksk = CastingKey::new((&client_key, &server_key), (&hl_client_key, &hl_server_key));
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i+2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8*(i>>1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i+2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8*(i>>1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();
|
||||
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
let cipher_iv = iv.map(|x| client_key.encrypt(x));
|
||||
|
||||
let mut ciphered_message = vec![FheUint64::try_encrypt(0u64, &hl_client_key).unwrap(); 9];
|
||||
|
||||
let mut trivium = TriviumStreamShortint::new(cipher_key, cipher_iv, &server_key, &ksk);
|
||||
|
||||
let mut vec = Vec::<u64>::with_capacity(8);
|
||||
while vec.len() < 8 {
|
||||
let trans_ciphered_message = trivium.trans_encrypt_64(ciphered_message.pop().unwrap(), &hl_server_key);
|
||||
vec.push(trans_ciphered_message.decrypt(&hl_client_key));
|
||||
}
|
||||
|
||||
let hexadecimal = get_hexagonal_string_from_u64(vec);
|
||||
assert_eq!(output_0_63, hexadecimal[0..64*2]);
|
||||
}
|
||||
```
|
||||
|
||||
# FHE Kreyvium implementation using tfhe-rs crate
|
||||
|
||||
This will work in exactly the same way as the Trivium implementation, except that the key and iv need to be 128 bits now. Available for the same internal types as Trivium, with similar syntax.
|
||||
|
||||
`KreyviumStreamByte<FheUint8>` and `KreyviumStreamShortint` also implement the `TransCiphering` trait.
|
||||
|
||||
# Testing
|
||||
|
||||
If you wish to run tests on this app, please run `cargo test -r trivium -- --test-threads=1` as multithreading provokes interferences between several running
|
||||
Triviums at the same time.
|
||||
75
apps/trivium/benches/kreyvium_bool.rs
Normal file
75
apps/trivium/benches/kreyvium_bool.rs
Normal file
@@ -0,0 +1,75 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheBool};
|
||||
|
||||
use tfhe_trivium::KreyviumStream;
|
||||
|
||||
use criterion::Criterion;
|
||||
|
||||
pub fn kreyvium_bool_gen(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled().enable_default_bool().build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [false; 128];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [false; 128];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));
|
||||
|
||||
let mut kreyvium = KreyviumStream::<FheBool>::new(cipher_key, iv, &server_key);
|
||||
|
||||
c.bench_function("kreyvium bool generate 64 bits", |b| {
|
||||
b.iter(|| kreyvium.next_64())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn kreyvium_bool_warmup(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled().enable_default_bool().build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [false; 128];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [false; 128];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
c.bench_function("kreyvium bool warmup", |b| {
|
||||
b.iter(|| {
|
||||
let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));
|
||||
let _kreyvium = KreyviumStream::<FheBool>::new(cipher_key, iv, &server_key);
|
||||
})
|
||||
});
|
||||
}
|
||||
96
apps/trivium/benches/kreyvium_byte.rs
Normal file
96
apps/trivium/benches/kreyvium_byte.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheUint64, FheUint8};
|
||||
|
||||
use tfhe_trivium::{KreyviumStreamByte, TransCiphering};
|
||||
|
||||
use criterion::Criterion;
|
||||
|
||||
pub fn kreyvium_byte_gen(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.enable_function_evaluation_integers()
|
||||
.build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [0u8; 16];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [0u8; 16];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));
|
||||
|
||||
let mut kreyvium = KreyviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);
|
||||
|
||||
c.bench_function("kreyvium byte generate 64 bits", |b| {
|
||||
b.iter(|| kreyvium.next_64())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn kreyvium_byte_trans(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.enable_function_evaluation_integers()
|
||||
.build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [0u8; 16];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [0u8; 16];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));
|
||||
|
||||
let ciphered_message = FheUint64::try_encrypt(0u64, &client_key).unwrap();
|
||||
let mut kreyvium = KreyviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);
|
||||
|
||||
c.bench_function("kreyvium byte transencrypt 64 bits", |b| {
|
||||
b.iter(|| kreyvium.trans_encrypt_64(ciphered_message.clone()))
|
||||
});
|
||||
}
|
||||
|
||||
pub fn kreyvium_byte_warmup(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.enable_function_evaluation_integers()
|
||||
.build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [0u8; 16];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [0u8; 16];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
c.bench_function("kreyvium byte warmup", |b| {
|
||||
b.iter(|| {
|
||||
let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));
|
||||
let _kreyvium = KreyviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);
|
||||
})
|
||||
});
|
||||
}
|
||||
155
apps/trivium/benches/kreyvium_shortint.rs
Normal file
155
apps/trivium/benches/kreyvium_shortint.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::shortint::prelude::*;
|
||||
use tfhe::shortint::KeySwitchingKey;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheUint64};
|
||||
|
||||
use tfhe_trivium::{KreyviumStreamShortint, TransCiphering};
|
||||
|
||||
use criterion::Criterion;
|
||||
|
||||
pub fn kreyvium_shortint_warmup(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
|
||||
let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();
|
||||
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
|
||||
let ksk = KeySwitchingKey::new(
|
||||
(&client_key, &server_key),
|
||||
(&underlying_ck, &underlying_sk),
|
||||
PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
|
||||
);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [0; 128];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [0; 128];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
c.bench_function("kreyvium 1_1 warmup", |b| {
|
||||
b.iter(|| {
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
let _kreyvium = KreyviumStreamShortint::new(
|
||||
cipher_key,
|
||||
iv,
|
||||
server_key.clone(),
|
||||
ksk.clone(),
|
||||
hl_server_key.clone(),
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
pub fn kreyvium_shortint_gen(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
|
||||
let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();
|
||||
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
|
||||
let ksk = KeySwitchingKey::new(
|
||||
(&client_key, &server_key),
|
||||
(&underlying_ck, &underlying_sk),
|
||||
PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
|
||||
);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [0; 128];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [0; 128];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
|
||||
let mut kreyvium = KreyviumStreamShortint::new(cipher_key, iv, server_key, ksk, hl_server_key);
|
||||
|
||||
c.bench_function("kreyvium 1_1 generate 64 bits", |b| {
|
||||
b.iter(|| kreyvium.next_64())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn kreyvium_shortint_trans(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
|
||||
let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();
|
||||
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
|
||||
let ksk = KeySwitchingKey::new(
|
||||
(&client_key, &server_key),
|
||||
(&underlying_ck, &underlying_sk),
|
||||
PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
|
||||
);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
|
||||
let mut key = [0; 128];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
|
||||
let mut iv = [0; 128];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
|
||||
let ciphered_message = FheUint64::try_encrypt(0u64, &hl_client_key).unwrap();
|
||||
let mut kreyvium = KreyviumStreamShortint::new(cipher_key, iv, server_key, ksk, hl_server_key);
|
||||
|
||||
c.bench_function("kreyvium 1_1 transencrypt 64 bits", |b| {
|
||||
b.iter(|| kreyvium.trans_encrypt_64(ciphered_message.clone()))
|
||||
});
|
||||
}
|
||||
53
apps/trivium/benches/trivium.rs
Normal file
53
apps/trivium/benches/trivium.rs
Normal file
@@ -0,0 +1,53 @@
|
||||
use criterion::{criterion_group, criterion_main};
|
||||
|
||||
mod trivium_bool;
|
||||
criterion_group!(
|
||||
trivium_bool,
|
||||
trivium_bool::trivium_bool_gen,
|
||||
trivium_bool::trivium_bool_warmup
|
||||
);
|
||||
mod kreyvium_bool;
|
||||
criterion_group!(
|
||||
kreyvium_bool,
|
||||
kreyvium_bool::kreyvium_bool_gen,
|
||||
kreyvium_bool::kreyvium_bool_warmup
|
||||
);
|
||||
|
||||
mod trivium_shortint;
|
||||
criterion_group!(
|
||||
trivium_shortint,
|
||||
trivium_shortint::trivium_shortint_gen,
|
||||
trivium_shortint::trivium_shortint_warmup,
|
||||
trivium_shortint::trivium_shortint_trans
|
||||
);
|
||||
mod kreyvium_shortint;
|
||||
criterion_group!(
|
||||
kreyvium_shortint,
|
||||
kreyvium_shortint::kreyvium_shortint_gen,
|
||||
kreyvium_shortint::kreyvium_shortint_warmup,
|
||||
kreyvium_shortint::kreyvium_shortint_trans
|
||||
);
|
||||
|
||||
mod trivium_byte;
|
||||
criterion_group!(
|
||||
trivium_byte,
|
||||
trivium_byte::trivium_byte_gen,
|
||||
trivium_byte::trivium_byte_trans,
|
||||
trivium_byte::trivium_byte_warmup
|
||||
);
|
||||
mod kreyvium_byte;
|
||||
criterion_group!(
|
||||
kreyvium_byte,
|
||||
kreyvium_byte::kreyvium_byte_gen,
|
||||
kreyvium_byte::kreyvium_byte_trans,
|
||||
kreyvium_byte::kreyvium_byte_warmup
|
||||
);
|
||||
|
||||
criterion_main!(
|
||||
trivium_bool,
|
||||
trivium_shortint,
|
||||
trivium_byte,
|
||||
kreyvium_bool,
|
||||
kreyvium_shortint,
|
||||
kreyvium_byte,
|
||||
);
|
||||
75
apps/trivium/benches/trivium_bool.rs
Normal file
75
apps/trivium/benches/trivium_bool.rs
Normal file
@@ -0,0 +1,75 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheBool};
|
||||
|
||||
use tfhe_trivium::TriviumStream;
|
||||
|
||||
use criterion::Criterion;
|
||||
|
||||
pub fn trivium_bool_gen(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled().enable_default_bool().build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [false; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [false; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));
|
||||
|
||||
let mut trivium = TriviumStream::<FheBool>::new(cipher_key, iv, &server_key);
|
||||
|
||||
c.bench_function("trivium bool generate 64 bits", |b| {
|
||||
b.iter(|| trivium.next_64())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn trivium_bool_warmup(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled().enable_default_bool().build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [false; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [false; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val: u8 = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2 == 1;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
c.bench_function("trivium bool warmup", |b| {
|
||||
b.iter(|| {
|
||||
let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));
|
||||
let _trivium = TriviumStream::<FheBool>::new(cipher_key, iv, &server_key);
|
||||
})
|
||||
});
|
||||
}
|
||||
93
apps/trivium/benches/trivium_byte.rs
Normal file
93
apps/trivium/benches/trivium_byte.rs
Normal file
@@ -0,0 +1,93 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheUint64, FheUint8};
|
||||
|
||||
use tfhe_trivium::{TransCiphering, TriviumStreamByte};
|
||||
|
||||
use criterion::Criterion;
|
||||
|
||||
pub fn trivium_byte_gen(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0u8; 10];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0u8; 10];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));
|
||||
|
||||
let mut trivium = TriviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);
|
||||
|
||||
c.bench_function("trivium byte generate 64 bits", |b| {
|
||||
b.iter(|| trivium.next_64())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn trivium_byte_trans(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0u8; 10];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0u8; 10];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));
|
||||
|
||||
let ciphered_message = FheUint64::try_encrypt(0u64, &client_key).unwrap();
|
||||
let mut trivium = TriviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);
|
||||
|
||||
c.bench_function("trivium byte transencrypt 64 bits", |b| {
|
||||
b.iter(|| trivium.trans_encrypt_64(ciphered_message.clone()))
|
||||
});
|
||||
}
|
||||
|
||||
pub fn trivium_byte_warmup(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (client_key, server_key) = generate_keys(config);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0u8; 10];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0u8; 10];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
}
|
||||
|
||||
c.bench_function("trivium byte warmup", |b| {
|
||||
b.iter(|| {
|
||||
let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));
|
||||
let _trivium = TriviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);
|
||||
})
|
||||
});
|
||||
}
|
||||
155
apps/trivium/benches/trivium_shortint.rs
Normal file
155
apps/trivium/benches/trivium_shortint.rs
Normal file
@@ -0,0 +1,155 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::shortint::prelude::*;
|
||||
use tfhe::shortint::KeySwitchingKey;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheUint64};
|
||||
|
||||
use tfhe_trivium::{TransCiphering, TriviumStreamShortint};
|
||||
|
||||
use criterion::Criterion;
|
||||
|
||||
pub fn trivium_shortint_warmup(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
|
||||
let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();
|
||||
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
|
||||
let ksk = KeySwitchingKey::new(
|
||||
(&client_key, &server_key),
|
||||
(&underlying_ck, &underlying_sk),
|
||||
PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
|
||||
);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
c.bench_function("trivium 1_1 warmup", |b| {
|
||||
b.iter(|| {
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
let _trivium = TriviumStreamShortint::new(
|
||||
cipher_key,
|
||||
iv,
|
||||
server_key.clone(),
|
||||
ksk.clone(),
|
||||
hl_server_key.clone(),
|
||||
);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
pub fn trivium_shortint_gen(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
|
||||
let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();
|
||||
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
|
||||
let ksk = KeySwitchingKey::new(
|
||||
(&client_key, &server_key),
|
||||
(&underlying_ck, &underlying_sk),
|
||||
PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
|
||||
);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
|
||||
let mut trivium = TriviumStreamShortint::new(cipher_key, iv, server_key, ksk, hl_server_key);
|
||||
|
||||
c.bench_function("trivium 1_1 generate 64 bits", |b| {
|
||||
b.iter(|| trivium.next_64())
|
||||
});
|
||||
}
|
||||
|
||||
pub fn trivium_shortint_trans(c: &mut Criterion) {
|
||||
let config = ConfigBuilder::all_disabled()
|
||||
.enable_default_integers()
|
||||
.build();
|
||||
let (hl_client_key, hl_server_key) = generate_keys(config);
|
||||
let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
|
||||
let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();
|
||||
|
||||
let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);
|
||||
|
||||
let ksk = KeySwitchingKey::new(
|
||||
(&client_key, &server_key),
|
||||
(&underlying_ck, &underlying_sk),
|
||||
PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
|
||||
);
|
||||
|
||||
let key_string = "0053A6F94C9FF24598EB".to_string();
|
||||
let mut key = [0; 80];
|
||||
|
||||
for i in (0..key_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
key[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let iv_string = "0D74DB42A91077DE45AC".to_string();
|
||||
let mut iv = [0; 80];
|
||||
|
||||
for i in (0..iv_string.len()).step_by(2) {
|
||||
let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
|
||||
for j in 0..8 {
|
||||
iv[8 * (i >> 1) + j] = val % 2;
|
||||
val >>= 1;
|
||||
}
|
||||
}
|
||||
|
||||
let cipher_key = key.map(|x| client_key.encrypt(x));
|
||||
|
||||
let ciphered_message = FheUint64::try_encrypt(0u64, &hl_client_key).unwrap();
|
||||
let mut trivium = TriviumStreamShortint::new(cipher_key, iv, server_key, ksk, hl_server_key);
|
||||
|
||||
c.bench_function("trivium 1_1 transencrypt 64 bits", |b| {
|
||||
b.iter(|| trivium.trans_encrypt_64(ciphered_message.clone()))
|
||||
});
|
||||
}
|
||||
257
apps/trivium/src/kreyvium/kreyvium.rs
Normal file
257
apps/trivium/src/kreyvium/kreyvium.rs
Normal file
@@ -0,0 +1,257 @@
|
||||
//! This module implements the Kreyvium stream cipher, using booleans or FheBool
|
||||
//! for the representation of the inner bits.
|
||||
|
||||
use crate::static_deque::StaticDeque;
|
||||
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{set_server_key, unset_server_key, FheBool, ServerKey};
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// Internal trait specifying which operations are necessary for KreyviumStream generic type
|
||||
pub trait KreyviumBoolInput<OpOutput>:
|
||||
Sized
|
||||
+ Clone
|
||||
+ std::ops::BitXor<Output = OpOutput>
|
||||
+ std::ops::BitAnd<Output = OpOutput>
|
||||
+ std::ops::Not<Output = OpOutput>
|
||||
{
|
||||
}
|
||||
impl KreyviumBoolInput<bool> for bool {}
|
||||
impl KreyviumBoolInput<bool> for &bool {}
|
||||
impl KreyviumBoolInput<FheBool> for FheBool {}
|
||||
impl KreyviumBoolInput<FheBool> for &FheBool {}
|
||||
|
||||
/// KreyviumStream: a struct implementing the Kreyvium stream cipher, using T for the internal
|
||||
/// representation of bits (bool or FheBool). To be able to compute FHE operations, it also owns
|
||||
/// an Option for a ServerKey.
|
||||
pub struct KreyviumStream<T> {
|
||||
a: StaticDeque<93, T>,
|
||||
b: StaticDeque<84, T>,
|
||||
c: StaticDeque<111, T>,
|
||||
k: StaticDeque<128, T>,
|
||||
iv: StaticDeque<128, T>,
|
||||
fhe_key: Option<ServerKey>,
|
||||
}
|
||||
|
||||
impl KreyviumStream<bool> {
|
||||
/// Constructor for `KreyviumStream<bool>`: arguments are the secret key and the input vector.
|
||||
/// Outputs a KreyviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(mut key: [bool; 128], mut iv: [bool; 128]) -> KreyviumStream<bool> {
|
||||
// Initialization of Kreyvium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_register = [false; 93];
|
||||
let mut b_register = [false; 84];
|
||||
let mut c_register = [false; 111];
|
||||
|
||||
for i in 0..93 {
|
||||
a_register[i] = key[128 - 93 + i];
|
||||
}
|
||||
for i in 0..84 {
|
||||
b_register[i] = iv[128 - 84 + i];
|
||||
}
|
||||
for i in 0..44 {
|
||||
c_register[111 - 44 + i] = iv[i];
|
||||
}
|
||||
for i in 0..66 {
|
||||
c_register[i + 1] = true;
|
||||
}
|
||||
|
||||
key.reverse();
|
||||
iv.reverse();
|
||||
KreyviumStream::<bool>::new_from_registers(
|
||||
a_register, b_register, c_register, key, iv, None,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl KreyviumStream<FheBool> {
|
||||
/// Constructor for `KreyviumStream<FheBool>`: arguments are the encrypted secret key and input
|
||||
/// vector, and the FHE server key.
|
||||
/// Outputs a KreyviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(
|
||||
mut key: [FheBool; 128],
|
||||
mut iv: [bool; 128],
|
||||
sk: &ServerKey,
|
||||
) -> KreyviumStream<FheBool> {
|
||||
set_server_key(sk.clone());
|
||||
|
||||
// Initialization of Kreyvium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_register = [false; 93].map(FheBool::encrypt_trivial);
|
||||
let mut b_register = [false; 84].map(FheBool::encrypt_trivial);
|
||||
let mut c_register = [false; 111].map(FheBool::encrypt_trivial);
|
||||
|
||||
for i in 0..93 {
|
||||
a_register[i] = key[128 - 93 + i].clone();
|
||||
}
|
||||
for i in 0..84 {
|
||||
b_register[i] = FheBool::encrypt_trivial(iv[128 - 84 + i]);
|
||||
}
|
||||
for i in 0..44 {
|
||||
c_register[111 - 44 + i] = FheBool::encrypt_trivial(iv[i]);
|
||||
}
|
||||
for i in 0..66 {
|
||||
c_register[i + 1] = FheBool::encrypt_trivial(true);
|
||||
}
|
||||
|
||||
key.reverse();
|
||||
iv.reverse();
|
||||
let iv = iv.map(FheBool::encrypt_trivial);
|
||||
|
||||
unset_server_key();
|
||||
KreyviumStream::<FheBool>::new_from_registers(
|
||||
a_register,
|
||||
b_register,
|
||||
c_register,
|
||||
key,
|
||||
iv,
|
||||
Some(sk.clone()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> KreyviumStream<T>
|
||||
where
|
||||
T: KreyviumBoolInput<T> + std::marker::Send + std::marker::Sync,
|
||||
for<'a> &'a T: KreyviumBoolInput<T>,
|
||||
{
|
||||
/// Internal generic constructor: arguments are already prepared registers, and an optional FHE
|
||||
/// server key
|
||||
fn new_from_registers(
|
||||
a_register: [T; 93],
|
||||
b_register: [T; 84],
|
||||
c_register: [T; 111],
|
||||
k_register: [T; 128],
|
||||
iv_register: [T; 128],
|
||||
key: Option<ServerKey>,
|
||||
) -> Self {
|
||||
let mut ret = Self {
|
||||
a: StaticDeque::<93, T>::new(a_register),
|
||||
b: StaticDeque::<84, T>::new(b_register),
|
||||
c: StaticDeque::<111, T>::new(c_register),
|
||||
k: StaticDeque::<128, T>::new(k_register),
|
||||
iv: StaticDeque::<128, T>::new(iv_register),
|
||||
fhe_key: key,
|
||||
};
|
||||
ret.init();
|
||||
ret
|
||||
}
|
||||
|
||||
/// The specification of Kreyvium includes running 1152 (= 18*64) unused steps to mix up the
|
||||
/// registers, before starting the proper stream
|
||||
fn init(&mut self) {
|
||||
for _ in 0..18 {
|
||||
self.next_64();
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes one turn of the stream, updating registers and outputting the new bit.
|
||||
pub fn next_bool(&mut self) -> T {
|
||||
match &self.fhe_key {
|
||||
Some(sk) => set_server_key(sk.clone()),
|
||||
None => (),
|
||||
};
|
||||
|
||||
let [o, a, b, c] = self.get_output_and_values(0);
|
||||
|
||||
self.a.push(a);
|
||||
self.b.push(b);
|
||||
self.c.push(c);
|
||||
self.k.shift();
|
||||
self.iv.shift();
|
||||
|
||||
o
|
||||
}
|
||||
|
||||
/// Computes a potential future step of Kreyvium, n terms in the future. This does not update
|
||||
/// registers, but rather returns with the output, the three values that will be used to
|
||||
/// update the registers, when the time is right. This function is meant to be used in
|
||||
/// parallel.
|
||||
fn get_output_and_values(&self, n: usize) -> [T; 4] {
|
||||
assert!(n < 65);
|
||||
|
||||
let (((temp_a, temp_b), (temp_c, a_and)), (b_and, c_and)) = rayon::join(
|
||||
|| {
|
||||
rayon::join(
|
||||
|| {
|
||||
rayon::join(
|
||||
|| &self.a[65 - n] ^ &self.a[92 - n],
|
||||
|| &self.b[68 - n] ^ &self.b[83 - n],
|
||||
)
|
||||
},
|
||||
|| {
|
||||
rayon::join(
|
||||
|| &(&self.c[65 - n] ^ &self.c[110 - n]) ^ &self.k[127 - n],
|
||||
|| &(&self.a[91 - n] & &self.a[90 - n]) ^ &self.iv[127 - n],
|
||||
)
|
||||
},
|
||||
)
|
||||
},
|
||||
|| {
|
||||
rayon::join(
|
||||
|| &self.b[82 - n] & &self.b[81 - n],
|
||||
|| &self.c[109 - n] & &self.c[108 - n],
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
let ((o, a), (b, c)) = rayon::join(
|
||||
|| {
|
||||
rayon::join(
|
||||
|| &(&temp_a ^ &temp_b) ^ &temp_c,
|
||||
|| &temp_c ^ &(&c_and ^ &self.a[68 - n]),
|
||||
)
|
||||
},
|
||||
|| {
|
||||
rayon::join(
|
||||
|| &temp_a ^ &(&a_and ^ &self.b[77 - n]),
|
||||
|| &temp_b ^ &(&b_and ^ &self.c[86 - n]),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
[o, a, b, c]
|
||||
}
|
||||
|
||||
/// This calls `get_output_and_values` in parallel 64 times, and stores all results in a Vec.
|
||||
fn get_64_output_and_values(&self) -> Vec<[T; 4]> {
|
||||
(0..64)
|
||||
.into_par_iter()
|
||||
.map(|x| self.get_output_and_values(x))
|
||||
.rev()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Computes 64 turns of the stream, outputting the 64 bits all at once in a
|
||||
/// Vec (first value is oldest, last is newest)
|
||||
pub fn next_64(&mut self) -> Vec<T> {
|
||||
match &self.fhe_key {
|
||||
Some(sk) => {
|
||||
rayon::broadcast(|_| set_server_key(sk.clone()));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
let mut values = self.get_64_output_and_values();
|
||||
match &self.fhe_key {
|
||||
Some(_) => {
|
||||
rayon::broadcast(|_| unset_server_key());
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
|
||||
let mut ret = Vec::<T>::with_capacity(64);
|
||||
|
||||
while let Some([o, a, b, c]) = values.pop() {
|
||||
ret.push(o);
|
||||
self.a.push(a);
|
||||
self.b.push(b);
|
||||
self.c.push(c);
|
||||
}
|
||||
self.k.n_shifts(64);
|
||||
self.iv.n_shifts(64);
|
||||
ret
|
||||
}
|
||||
}
|
||||
293
apps/trivium/src/kreyvium/kreyvium_byte.rs
Normal file
293
apps/trivium/src/kreyvium/kreyvium_byte.rs
Normal file
@@ -0,0 +1,293 @@
|
||||
//! This module implements the Kreyvium stream cipher, using u8 or FheUint8
|
||||
//! for the representation of the inner bits.
|
||||
|
||||
use crate::static_deque::{StaticByteDeque, StaticByteDequeInput};
|
||||
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{set_server_key, unset_server_key, FheUint8, ServerKey};
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// Internal trait specifying which operations are necessary for KreyviumStreamByte generic type
|
||||
pub trait KreyviumByteInput<OpOutput>:
|
||||
Sized
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Clone
|
||||
+ StaticByteDequeInput<OpOutput>
|
||||
+ std::ops::BitXor<Output = OpOutput>
|
||||
+ std::ops::BitAnd<Output = OpOutput>
|
||||
+ std::ops::Shr<u8, Output = OpOutput>
|
||||
+ std::ops::Shl<u8, Output = OpOutput>
|
||||
+ std::ops::Add<Output = OpOutput>
|
||||
{
|
||||
}
|
||||
impl KreyviumByteInput<u8> for u8 {}
|
||||
impl KreyviumByteInput<u8> for &u8 {}
|
||||
impl KreyviumByteInput<FheUint8> for FheUint8 {}
|
||||
impl KreyviumByteInput<FheUint8> for &FheUint8 {}
|
||||
|
||||
/// KreyviumStreamByte: a struct implementing the Kreyvium stream cipher, using T for the internal
|
||||
/// representation of bits (u8 or FheUint8). To be able to compute FHE operations, it also owns
|
||||
/// an Option for a ServerKey.
|
||||
/// Since the original Kreyvium registers' sizes are not a multiple of 8, these registers (which
|
||||
/// store byte-like objects) have a size that is the eighth of the closest multiple of 8 above the
|
||||
/// originals' sizes.
|
||||
pub struct KreyviumStreamByte<T> {
|
||||
a_byte: StaticByteDeque<12, T>,
|
||||
b_byte: StaticByteDeque<11, T>,
|
||||
c_byte: StaticByteDeque<14, T>,
|
||||
k_byte: StaticByteDeque<16, T>,
|
||||
iv_byte: StaticByteDeque<16, T>,
|
||||
fhe_key: Option<ServerKey>,
|
||||
}
|
||||
|
||||
impl KreyviumStreamByte<u8> {
|
||||
/// Constructor for `KreyviumStreamByte<u8>`: arguments are the secret key and the input vector.
|
||||
/// Outputs a KreyviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(key_bytes: [u8; 16], iv_bytes: [u8; 16]) -> KreyviumStreamByte<u8> {
|
||||
// Initialization of Kreyvium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_byte_reg = [0u8; 12];
|
||||
let mut b_byte_reg = [0u8; 11];
|
||||
let mut c_byte_reg = [0u8; 14];
|
||||
|
||||
// Copy key bits into a register
|
||||
a_byte_reg.copy_from_slice(&key_bytes[4..]);
|
||||
|
||||
// Copy iv bits into a register
|
||||
b_byte_reg.copy_from_slice(&iv_bytes[5..]);
|
||||
|
||||
// Copy a lot of ones in the c register
|
||||
c_byte_reg[0] = 252;
|
||||
c_byte_reg[1..8].fill(255);
|
||||
|
||||
// Copy iv bits in the c register
|
||||
c_byte_reg[8] = (iv_bytes[0] << 4) | 31;
|
||||
for b in 9..14 {
|
||||
c_byte_reg[b] = (iv_bytes[b - 9] >> 4) | (iv_bytes[b - 8] << 4);
|
||||
}
|
||||
|
||||
// Key and iv are stored in reverse in their shift registers
|
||||
let mut key = key_bytes.map(|b| b.reverse_bits());
|
||||
let mut iv = iv_bytes.map(|b| b.reverse_bits());
|
||||
key.reverse();
|
||||
iv.reverse();
|
||||
|
||||
let mut ret = KreyviumStreamByte::<u8>::new_from_registers(
|
||||
a_byte_reg, b_byte_reg, c_byte_reg, key, iv, None,
|
||||
);
|
||||
ret.init();
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl KreyviumStreamByte<FheUint8> {
|
||||
/// Constructor for `KreyviumStream<FheUint8>`: arguments are the encrypted secret key and input
|
||||
/// vector, and the FHE server key.
|
||||
/// Outputs a KreyviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(
|
||||
key_bytes: [FheUint8; 16],
|
||||
iv_bytes: [u8; 16],
|
||||
server_key: &ServerKey,
|
||||
) -> KreyviumStreamByte<FheUint8> {
|
||||
set_server_key(server_key.clone());
|
||||
|
||||
// Initialization of Kreyvium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_byte_reg = [0u8; 12].map(FheUint8::encrypt_trivial);
|
||||
let mut b_byte_reg = [0u8; 11].map(FheUint8::encrypt_trivial);
|
||||
let mut c_byte_reg = [0u8; 14].map(FheUint8::encrypt_trivial);
|
||||
|
||||
// Copy key bits into a register
|
||||
a_byte_reg.clone_from_slice(&key_bytes[4..]);
|
||||
|
||||
// Copy iv bits into a register
|
||||
for b in 0..11 {
|
||||
b_byte_reg[b] = FheUint8::encrypt_trivial(iv_bytes[b + 5]);
|
||||
}
|
||||
// Copy a lot of ones in the c register
|
||||
c_byte_reg[0] = FheUint8::encrypt_trivial(252u8);
|
||||
|
||||
c_byte_reg[1..8].fill_with(|| FheUint8::encrypt_trivial(255u8));
|
||||
|
||||
// Copy iv bits in the c register
|
||||
c_byte_reg[8] = FheUint8::encrypt_trivial((&iv_bytes[0] << 4u8) | 31u8);
|
||||
for b in 9..14 {
|
||||
c_byte_reg[b] =
|
||||
FheUint8::encrypt_trivial((&iv_bytes[b - 9] >> 4u8) | (&iv_bytes[b - 8] << 4u8));
|
||||
}
|
||||
|
||||
// Key and iv are stored in reverse in their shift registers
|
||||
let mut key = key_bytes.map(|b| b.map(|x| (x as u8).reverse_bits() as u64));
|
||||
let mut iv = iv_bytes.map(|x| FheUint8::encrypt_trivial(x.reverse_bits()));
|
||||
key.reverse();
|
||||
iv.reverse();
|
||||
|
||||
unset_server_key();
|
||||
|
||||
let mut ret = KreyviumStreamByte::<FheUint8>::new_from_registers(
|
||||
a_byte_reg,
|
||||
b_byte_reg,
|
||||
c_byte_reg,
|
||||
key,
|
||||
iv,
|
||||
Some(server_key.clone()),
|
||||
);
|
||||
ret.init();
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> KreyviumStreamByte<T>
where
    T: KreyviumByteInput<T> + Send,
    for<'a> &'a T: KreyviumByteInput<T>,
{
    /// Internal generic constructor: arguments are already prepared registers, and an optional FHE
    /// server key
    fn new_from_registers(
        a_register: [T; 12],
        b_register: [T; 11],
        c_register: [T; 14],
        k_register: [T; 16],
        iv_register: [T; 16],
        sk: Option<ServerKey>,
    ) -> Self {
        // Each deque stores whole bytes; bit-level taps are reconstructed on
        // demand by StaticByteDeque::byte().
        Self {
            a_byte: StaticByteDeque::<12, T>::new(a_register),
            b_byte: StaticByteDeque::<11, T>::new(b_register),
            c_byte: StaticByteDeque::<14, T>::new(c_register),
            k_byte: StaticByteDeque::<16, T>::new(k_register),
            iv_byte: StaticByteDeque::<16, T>::new(iv_register),
            fhe_key: sk,
        }
    }

    /// The specification of Kreyvium includes running 1152 (= 18*64) unused steps to mix up the
    /// registers, before starting the proper stream
    fn init(&mut self) {
        for _ in 0..18 {
            // Output of the warm-up rounds is discarded.
            self.next_64();
        }
    }

    /// Computes 8 potential future step of Kreyvium, b*8 terms in the future. This does not update
    /// registers, but rather returns with the output, the three values that will be used to
    /// update the registers, when the time is right. This function is meant to be used in
    /// parallel.
    ///
    /// Returns `[output_byte, new_a_byte, new_b_byte, new_c_byte]`.
    fn get_output_and_values(&self, b: usize) -> [T; 4] {
        // n is the bit index of the oldest bit of the byte being computed;
        // with b in 0..8 this stays below 65, matching the register tap math.
        let n = b * 8 + 7;
        assert!(n < 65);

        // Fetch all register taps in parallel. The numeric offsets are the
        // byte-level equivalents of the Kreyvium bit tap positions —
        // presumably derived from the reference bit-oriented implementation;
        // TODO(review): confirm against the Kreyvium specification.
        let (((k, iv), (a1, a2, a3, a4, a5)), ((b1, b2, b3, b4, b5), (c1, c2, c3, c4, c5))) =
            rayon::join(
                || {
                    rayon::join(
                        || (self.k_byte.byte(127 - n), self.iv_byte.byte(127 - n)),
                        || Self::get_bytes(&self.a_byte, [91 - n, 90 - n, 68 - n, 65 - n, 92 - n]),
                    )
                },
                || {
                    rayon::join(
                        || Self::get_bytes(&self.b_byte, [82 - n, 81 - n, 77 - n, 68 - n, 83 - n]),
                        || {
                            Self::get_bytes(
                                &self.c_byte,
                                [109 - n, 108 - n, 86 - n, 65 - n, 110 - n],
                            )
                        },
                    )
                },
            );

        // Combine taps in parallel. Note: in `a1 & a2 ^ iv`, `&` binds tighter
        // than `^` in Rust, so this is `(a1 & a2) ^ iv`.
        let (((temp_a, temp_b), (temp_c, a_and)), (b_and, c_and)) = rayon::join(
            || {
                rayon::join(
                    || rayon::join(|| a4 ^ a5, || b4 ^ b5),
                    || rayon::join(|| c4 ^ c5 ^ k, || a1 & a2 ^ iv),
                )
            },
            || rayon::join(|| b1 & b2, || c1 & c2),
        );

        // Clones needed because the temp values are consumed twice below.
        let (temp_a_2, temp_b_2, temp_c_2) = (temp_a.clone(), temp_b.clone(), temp_c.clone());

        // o is the keystream byte; a/b/c are the feedback bytes for each register.
        let ((o, a), (b, c)) = rayon::join(
            || {
                rayon::join(
                    || (temp_a_2 ^ temp_b_2) ^ temp_c_2,
                    || temp_c ^ ((c_and) ^ a3),
                )
            },
            || rayon::join(|| temp_a ^ (a_and ^ b3), || temp_b ^ (b_and ^ c3)),
        );

        [o, a, b, c]
    }

    /// This calls `get_output_and_values` in parallel 8 times, and stores all results in a Vec.
    fn get_64_output_and_values(&self) -> Vec<[T; 4]> {
        (0..8)
            .into_par_iter()
            .map(|i| self.get_output_and_values(i))
            .collect()
    }

    /// Computes 64 turns of the stream, outputting the 64 bits (in 8 bytes) all at once in a
    /// Vec (first value is oldest, last is newest)
    pub fn next_64(&mut self) -> Vec<T> {
        // When running over FHE ciphertexts, every rayon worker thread needs
        // its own copy of the server key for the duration of the computation.
        match &self.fhe_key {
            Some(sk) => {
                rayon::broadcast(|_| set_server_key(sk.clone()));
            }
            None => (),
        }
        let values = self.get_64_output_and_values();
        match &self.fhe_key {
            Some(_) => {
                rayon::broadcast(|_| unset_server_key());
            }
            None => (),
        }

        // Commit the precomputed feedback bytes in order and collect outputs.
        let mut bytes = Vec::<T>::with_capacity(8);
        for [o, a, b, c] in values {
            self.a_byte.push(a);
            self.b_byte.push(b);
            self.c_byte.push(c);
            bytes.push(o);
        }
        // Key and IV registers only rotate; no new values are pushed.
        self.k_byte.n_shifts(8);
        self.iv_byte.n_shifts(8);

        bytes
    }

    /// Reconstructs a bunch of 5 bytes in a parallel fashion.
    fn get_bytes<const N: usize>(
        reg: &StaticByteDeque<N, T>,
        offsets: [usize; 5],
    ) -> (T, T, T, T, T) {
        // Reversed so that successive pop() calls below yield the bytes in
        // the same order as `offsets`.
        let mut ret = offsets
            .par_iter()
            .rev()
            .map(|&i| reg.byte(i))
            .collect::<Vec<_>>();
        (
            ret.pop().unwrap(),
            ret.pop().unwrap(),
            ret.pop().unwrap(),
            ret.pop().unwrap(),
            ret.pop().unwrap(),
        )
    }
}
|
||||
|
||||
impl KreyviumStreamByte<FheUint8> {
|
||||
pub fn get_server_key(&self) -> &ServerKey {
|
||||
self.fhe_key.as_ref().unwrap()
|
||||
}
|
||||
}
|
||||
205
apps/trivium/src/kreyvium/kreyvium_shortint.rs
Normal file
205
apps/trivium/src/kreyvium/kreyvium_shortint.rs
Normal file
@@ -0,0 +1,205 @@
|
||||
use crate::static_deque::StaticDeque;
|
||||
|
||||
use tfhe::shortint::prelude::*;
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// KreyviumStreamShortint: a struct implementing the Kreyvium stream cipher, using a generic
/// Ciphertext for the internal representation of bits (intended to represent a single bit). To be
/// able to compute FHE operations, it also owns a ServerKey.
pub struct KreyviumStreamShortint {
    // The three Kreyvium shift registers; each Ciphertext holds one bit.
    a: StaticDeque<93, Ciphertext>,
    b: StaticDeque<84, Ciphertext>,
    c: StaticDeque<111, Ciphertext>,
    // Key and IV registers, also one bit per Ciphertext.
    k: StaticDeque<128, Ciphertext>,
    iv: StaticDeque<128, Ciphertext>,
    // Shortint server key used for the stream computation itself.
    internal_server_key: ServerKey,
    // Casting key — presumably used to key-switch stream ciphertexts toward
    // the high-level API during transciphering; exposed via get_casting_key().
    transciphering_casting_key: KeySwitchingKey,
    // High-level server key, exposed via get_hl_server_key().
    hl_server_key: tfhe::ServerKey,
}
|
||||
|
||||
impl KreyviumStreamShortint {
    /// Constructor for KreyviumStreamShortint: arguments are the secret key and the input vector,
    /// and a ServerKey reference. Outputs a KreyviumStream object already initialized (1152
    /// steps have been run before returning)
    pub fn new(
        mut key: [Ciphertext; 128],
        mut iv: [u64; 128],
        sk: ServerKey,
        ksk: KeySwitchingKey,
        hl_sk: tfhe::ServerKey,
    ) -> Self {
        // Initialization of Kreyvium registers: a has the secret key, b the input vector,
        // and c a few ones.
        let mut a_register: [Ciphertext; 93] = [0; 93].map(|x| sk.create_trivial(x));
        let mut b_register: [Ciphertext; 84] = [0; 84].map(|x| sk.create_trivial(x));
        let mut c_register: [Ciphertext; 111] = [0; 111].map(|x| sk.create_trivial(x));

        // Register a: last 93 key bits (encrypted).
        for i in 0..93 {
            a_register[i] = key[128 - 93 + i].clone();
        }
        // Register b: last 84 IV bits, as trivial encryptions.
        for i in 0..84 {
            b_register[i] = sk.create_trivial(iv[128 - 84 + i]);
        }
        // Register c: first 44 IV bits at the tail...
        for i in 0..44 {
            c_register[111 - 44 + i] = sk.create_trivial(iv[i]);
        }
        // ...and 66 trivial ones starting at index 1 — presumably per the
        // Kreyvium spec's register setup; TODO(review): confirm against it.
        for i in 0..66 {
            c_register[i + 1] = sk.create_trivial(1);
        }

        // Key/IV deques are stored newest-first, hence the reversal.
        key.reverse();
        iv.reverse();
        let iv = iv.map(|x| sk.create_trivial(x));

        let mut ret = Self {
            a: StaticDeque::<93, Ciphertext>::new(a_register),
            b: StaticDeque::<84, Ciphertext>::new(b_register),
            c: StaticDeque::<111, Ciphertext>::new(c_register),
            k: StaticDeque::<128, Ciphertext>::new(key),
            iv: StaticDeque::<128, Ciphertext>::new(iv),
            internal_server_key: sk,
            transciphering_casting_key: ksk,
            hl_server_key: hl_sk,
        };
        ret.init();
        ret
    }

    /// The specification of Kreyvium includes running 1152 (= 18*64) unused steps to mix up the
    /// registers, before starting the proper stream
    fn init(&mut self) {
        for _ in 0..18 {
            // Warm-up output is discarded.
            self.next_64();
        }
    }

    /// Computes one turn of the stream, updating registers and outputting the new bit.
    pub fn next_ct(&mut self) -> Ciphertext {
        let [o, a, b, c] = self.get_output_and_values(0);

        self.a.push(a);
        self.b.push(b);
        self.c.push(c);

        o
    }

    /// Computes a potential future step of Kreyvium, n terms in the future. This does not update
    /// registers, but rather returns with the output, the three values that will be used to
    /// update the registers, when the time is right. This function is meant to be used in
    /// parallel.
    ///
    /// Returns `[output_bit, new_a_bit, new_b_bit, new_c_bit]`.
    fn get_output_and_values(&self, n: usize) -> [Ciphertext; 4] {
        // Tap positions for the key/IV and the three registers. Indices are
        // "n steps in the future" relative to the deque's newest element.
        let (k, iv) = (&self.k[127 - n], &self.iv[127 - n]);

        let (a1, a2, a3, a4, a5) = (
            &self.a[65 - n],
            &self.a[92 - n],
            &self.a[91 - n],
            &self.a[90 - n],
            &self.a[68 - n],
        );
        let (b1, b2, b3, b4, b5) = (
            &self.b[68 - n],
            &self.b[83 - n],
            &self.b[82 - n],
            &self.b[81 - n],
            &self.b[77 - n],
        );
        let (c1, c2, c3, c4, c5) = (
            &self.c[65 - n],
            &self.c[110 - n],
            &self.c[109 - n],
            &self.c[108 - n],
            &self.c[86 - n],
        );

        // XOR of bits is implemented as additions over shortint; `unchecked_*`
        // variants defer noise/carry management until the later
        // `add_assign`/`message_extract_assign` calls below.
        let temp_a = self.internal_server_key.unchecked_add(a1, a2);
        let temp_b = self.internal_server_key.unchecked_add(b1, b2);
        let mut temp_c = self.internal_server_key.unchecked_add(c1, c2);
        self.internal_server_key
            .unchecked_add_assign(&mut temp_c, k);

        // Compute the three register feedback bits and the output bit in parallel.
        let ((new_a, new_b), (new_c, o)) = rayon::join(
            || {
                rayon::join(
                    || {
                        let mut new_a = self.internal_server_key.unchecked_bitand(c3, c4);
                        self.internal_server_key
                            .unchecked_add_assign(&mut new_a, a5);
                        // Non-unchecked add_assign: handles carries/noise here.
                        self.internal_server_key.add_assign(&mut new_a, &temp_c);
                        new_a
                    },
                    || {
                        let mut new_b = self.internal_server_key.unchecked_bitand(a3, a4);
                        self.internal_server_key
                            .unchecked_add_assign(&mut new_b, b5);
                        self.internal_server_key
                            .unchecked_add_assign(&mut new_b, &temp_a);
                        self.internal_server_key.add_assign(&mut new_b, iv);
                        new_b
                    },
                )
            },
            || {
                rayon::join(
                    || {
                        let mut new_c = self.internal_server_key.unchecked_bitand(b3, b4);
                        self.internal_server_key
                            .unchecked_add_assign(&mut new_c, c5);
                        self.internal_server_key
                            .unchecked_add_assign(&mut new_c, &temp_b);
                        // Reduce accumulated carries back to the message space.
                        self.internal_server_key.message_extract_assign(&mut new_c);
                        new_c
                    },
                    || {
                        // Output bit: temp_a ^ temp_b ^ temp_c.
                        self.internal_server_key.bitxor(
                            &self.internal_server_key.unchecked_add(&temp_a, &temp_b),
                            &temp_c,
                        )
                    },
                )
            },
        );

        [o, new_a, new_b, new_c]
    }

    /// This calls `get_output_and_values` in parallel 64 times, and stores all results in a Vec.
    fn get_64_output_and_values(&self) -> Vec<[Ciphertext; 4]> {
        // Reversed so that next_64() can pop() values in chronological order.
        (0..64)
            .into_par_iter()
            .map(|x| self.get_output_and_values(x))
            .rev()
            .collect()
    }

    /// Computes 64 turns of the stream, outputting the 64 bits all at once in a
    /// Vec (first value is oldest, last is newest)
    pub fn next_64(&mut self) -> Vec<Ciphertext> {
        let mut values = self.get_64_output_and_values();

        let mut ret = Vec::<Ciphertext>::with_capacity(64);
        // values was collected reversed, so pop() yields steps oldest-first.
        while let Some([o, a, b, c]) = values.pop() {
            ret.push(o);
            self.a.push(a);
            self.b.push(b);
            self.c.push(c);
        }
        // Key and IV registers only rotate.
        self.k.n_shifts(64);
        self.iv.n_shifts(64);
        ret
    }

    /// Getter for the shortint server key used by the stream computation.
    pub fn get_internal_server_key(&self) -> &ServerKey {
        &self.internal_server_key
    }

    /// Getter for the transciphering casting (key-switching) key.
    pub fn get_casting_key(&self) -> &KeySwitchingKey {
        &self.transciphering_casting_key
    }

    /// Getter for the high-level API server key.
    pub fn get_hl_server_key(&self) -> &tfhe::ServerKey {
        &self.hl_server_key
    }
}
|
||||
12
apps/trivium/src/kreyvium/mod.rs
Normal file
12
apps/trivium/src/kreyvium/mod.rs
Normal file
@@ -0,0 +1,12 @@
|
||||
// Kreyvium module layout: three implementations of the same cipher at
// different granularities (bit-level, byte-level, shortint-based).
#[allow(clippy::module_inception)]
mod kreyvium;
pub use kreyvium::KreyviumStream;

mod kreyvium_byte;
pub use kreyvium_byte::KreyviumStreamByte;

mod kreyvium_shortint;
pub use kreyvium_shortint::KreyviumStreamShortint;

#[cfg(test)]
mod test;
|
||||
378
apps/trivium/src/kreyvium/test.rs
Normal file
378
apps/trivium/src/kreyvium/test.rs
Normal file
@@ -0,0 +1,378 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheBool, FheUint64, FheUint8};
|
||||
|
||||
use crate::{KreyviumStream, KreyviumStreamByte, KreyviumStreamShortint, TransCiphering};
|
||||
|
||||
// Values for these tests come from the github repo renaud1239/Kreyvium,
|
||||
// commit fd6828f68711276c25f55e605935028f5e843f43
|
||||
|
||||
/// Converts an LSB-first bit stream into an uppercase hexadecimal string.
///
/// Each chunk of 8 bits is one byte; within a byte, bits 4..8 form the high
/// nibble and bits 0..4 the low nibble, each read least-significant-bit first.
/// The original implementation spelled out all 16 patterns per nibble in two
/// 16-arm `match` blocks; this computes the nibble value directly, which is
/// equivalent (all 16 patterns were covered, so the `_ => ()` arms were dead).
///
/// # Panics
/// Panics if `a.len()` is not a multiple of 8.
fn get_hexadecimal_string_from_lsb_first_stream(a: Vec<bool>) -> String {
    assert!(a.len() % 8 == 0);
    let mut hexadecimal: String = "".to_string();
    for test in a.chunks(8) {
        // Encoding is bytes in LSB order: high nibble (bits 4..8) first,
        // then low nibble (bits 0..4).
        for nibble in [&test[4..8], &test[0..4]] {
            let value = nibble
                .iter()
                .enumerate()
                .fold(0u32, |acc, (i, &bit)| acc | ((bit as u32) << i));
            // value < 16, so from_digit cannot fail.
            hexadecimal.push(char::from_digit(value, 16).unwrap().to_ascii_uppercase());
        }
    }
    hexadecimal
}
|
||||
|
||||
/// Renders a byte vector as an uppercase hexadecimal string, two characters
/// per byte, in input order.
///
/// # Panics
/// Panics if `a.len()` is not a multiple of 8.
fn get_hexagonal_string_from_bytes(a: Vec<u8>) -> String {
    assert!(a.len() % 8 == 0);
    a.iter()
        .map(|byte| format!("{:02X?}", byte))
        .collect::<String>()
}
|
||||
|
||||
/// Renders a vector of u64 values as an uppercase hexadecimal string,
/// sixteen zero-padded characters per value, in input order.
fn get_hexagonal_string_from_u64(a: Vec<u64>) -> String {
    a.iter()
        .map(|word| format!("{:016X?}", word))
        .collect::<String>()
}
|
||||
|
||||
#[test]
// Clear Kreyvium with all-zero key and IV: the first 64 keystream bits must
// match the reference vector (see reference repo cited at the top of the file).
fn kreyvium_test_1() {
    let key = [false; 128];
    let iv = [false; 128];
    let output = "26DCF1F4BC0F1922";

    let mut kreyvium = KreyviumStream::<bool>::new(key, iv);

    let mut vec = Vec::<bool>::with_capacity(64);
    while vec.len() < 64 {
        vec.push(kreyvium.next_bool());
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
#[test]
// Clear Kreyvium with only the first key bit set: checks key sensitivity
// against the reference vector.
fn kreyvium_test_2() {
    let mut key = [false; 128];
    let iv = [false; 128];
    key[0] = true;

    let output = "4FD421D4DA3D2C8A";

    let mut kreyvium = KreyviumStream::<bool>::new(key, iv);

    let mut vec = Vec::<bool>::with_capacity(64);
    while vec.len() < 64 {
        vec.push(kreyvium.next_bool());
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
#[test]
// Clear Kreyvium with only the first IV bit set: checks IV sensitivity
// against the reference vector.
fn kreyvium_test_3() {
    let key = [false; 128];
    let mut iv = [false; 128];
    iv[0] = true;

    let output = "C9217BA0D762ACA1";

    let mut kreyvium = KreyviumStream::<bool>::new(key, iv);

    let mut vec = Vec::<bool>::with_capacity(64);
    while vec.len() < 64 {
        vec.push(kreyvium.next_bool());
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
#[test]
// Clear Kreyvium with a non-trivial key/IV pair parsed from hex strings
// (each hex byte expanded into 8 LSB-first bits).
fn kreyvium_test_4() {
    let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
    let mut key = [false; 128];

    // Parse two hex chars at a time; bit j of byte i>>1 is LSB-first.
    for i in (0..key_string.len()).step_by(2) {
        let mut val: u8 = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            key[8 * (i >> 1) + j] = val % 2 == 1;
            val >>= 1;
        }
    }

    let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
    let mut iv = [false; 128];

    for i in (0..iv_string.len()).step_by(2) {
        let mut val: u8 = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            iv[8 * (i >> 1) + j] = val % 2 == 1;
            val >>= 1;
        }
    }

    let output = "D1F0303482061111";

    let mut kreyvium = KreyviumStream::<bool>::new(key, iv);

    let mut vec = Vec::<bool>::with_capacity(64);
    while vec.len() < 64 {
        vec.push(kreyvium.next_bool());
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(hexadecimal, output);
}
|
||||
|
||||
#[test]
// FHE Kreyvium over encrypted booleans: same key/IV/expected output as
// kreyvium_test_4, but the key is encrypted and the stream is computed
// homomorphically, then decrypted and compared.
fn kreyvium_test_fhe_long() {
    let config = ConfigBuilder::all_disabled().enable_default_bool().build();
    let (client_key, server_key) = generate_keys(config);

    let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
    let mut key = [false; 128];

    // Hex string -> LSB-first bit array (same scheme as kreyvium_test_4).
    for i in (0..key_string.len()).step_by(2) {
        let mut val: u8 = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            key[8 * (i >> 1) + j] = val % 2 == 1;
            val >>= 1;
        }
    }

    let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
    let mut iv = [false; 128];

    for i in (0..iv_string.len()).step_by(2) {
        let mut val: u8 = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            iv[8 * (i >> 1) + j] = val % 2 == 1;
            val >>= 1;
        }
    }

    let output = "D1F0303482061111";

    let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));

    let mut kreyvium = KreyviumStream::<FheBool>::new(cipher_key, iv, &server_key);

    let mut vec = Vec::<bool>::with_capacity(64);
    while vec.len() < 64 {
        let cipher_outputs = kreyvium.next_64();
        for c in cipher_outputs {
            vec.push(c.decrypt(&client_key))
        }
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
use tfhe::shortint::prelude::*;
|
||||
|
||||
#[test]
// Shortint-based Kreyvium with transciphering: encrypts the key bit-by-bit
// with a shortint client key, builds a casting key toward the high-level API,
// trans-encrypts an all-zero u64 and checks the result equals the keystream.
fn kreyvium_test_shortint_long() {
    let config = ConfigBuilder::all_disabled()
        .enable_default_integers()
        .build();
    let (hl_client_key, hl_server_key) = generate_keys(config);
    let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
    let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();

    let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);

    // Casting key from the 1_1 stream parameters to the 2_2 high-level ones.
    let ksk = KeySwitchingKey::new(
        (&client_key, &server_key),
        (&underlying_ck, &underlying_sk),
        PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
    );

    let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
    let mut key = [0; 128];

    // Hex string -> LSB-first bit array (bits stored as u64 0/1 here).
    for i in (0..key_string.len()).step_by(2) {
        let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            key[8 * (i >> 1) + j] = val % 2;
            val >>= 1;
        }
    }

    let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
    let mut iv = [0; 128];

    for i in (0..iv_string.len()).step_by(2) {
        let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            iv[8 * (i >> 1) + j] = val % 2;
            val >>= 1;
        }
    }
    let output = "D1F0303482061111".to_string();

    let cipher_key = key.map(|x| client_key.encrypt(x));

    // Trans-encrypting zero yields the raw keystream after decryption.
    let ciphered_message = FheUint64::try_encrypt(0u64, &hl_client_key).unwrap();

    let mut kreyvium = KreyviumStreamShortint::new(cipher_key, iv, server_key, ksk, hl_server_key);

    let trans_ciphered_message = kreyvium.trans_encrypt_64(ciphered_message);
    let ciphered_message = trans_ciphered_message.decrypt(&hl_client_key);

    let hexadecimal = get_hexagonal_string_from_u64(vec![ciphered_message]);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
#[test]
// Byte-level clear Kreyvium: same key/IV/expected output as the bit-level
// tests, but fed as raw bytes to KreyviumStreamByte<u8>.
fn kreyvium_test_clear_byte() {
    let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
    let mut key_bytes = [0u8; 16];

    // Two hex chars per byte, in order.
    for i in (0..key_string.len()).step_by(2) {
        key_bytes[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
    }

    let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
    let mut iv_bytes = [0u8; 16];

    for i in (0..iv_string.len()).step_by(2) {
        iv_bytes[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
    }

    let output = "D1F0303482061111".to_string();

    let mut kreyvium = KreyviumStreamByte::<u8>::new(key_bytes, iv_bytes);

    let mut vec = Vec::<u8>::with_capacity(8);
    while vec.len() < 8 {
        let outputs = kreyvium.next_64();
        for c in outputs {
            vec.push(c)
        }
    }

    let hexadecimal = get_hexagonal_string_from_bytes(vec);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
#[test]
// Byte-level FHE Kreyvium: key bytes encrypted as FheUint8, stream computed
// homomorphically and decrypted for comparison.
fn kreyvium_test_byte_long() {
    let config = ConfigBuilder::all_disabled()
        .enable_default_integers()
        .enable_function_evaluation_integers()
        .build();
    let (client_key, server_key) = generate_keys(config);

    let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
    let mut key_bytes = [0u8; 16];

    for i in (0..key_string.len()).step_by(2) {
        key_bytes[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
    }

    let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
    let mut iv_bytes = [0u8; 16];

    for i in (0..iv_string.len()).step_by(2) {
        iv_bytes[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
    }

    let cipher_key = key_bytes.map(|x| FheUint8::encrypt(x, &client_key));

    let output = "D1F0303482061111".to_string();

    let mut kreyvium = KreyviumStreamByte::<FheUint8>::new(cipher_key, iv_bytes, &server_key);

    let mut vec = Vec::<u8>::with_capacity(8);
    while vec.len() < 8 {
        let cipher_outputs = kreyvium.next_64();
        for c in cipher_outputs {
            vec.push(c.decrypt(&client_key))
        }
    }

    let hexadecimal = get_hexagonal_string_from_bytes(vec);
    assert_eq!(output, hexadecimal);
}
|
||||
|
||||
#[test]
// Byte-level FHE Kreyvium with transciphering: trans-encrypts an all-zero
// u64 so the decrypted result equals the raw keystream.
fn kreyvium_test_fhe_byte_transciphering_long() {
    let config = ConfigBuilder::all_disabled()
        .enable_default_integers()
        .enable_function_evaluation_integers()
        .build();
    let (client_key, server_key) = generate_keys(config);

    let key_string = "0053A6F94C9FF24598EB000000000000".to_string();
    let mut key = [0u8; 16];

    for i in (0..key_string.len()).step_by(2) {
        key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
    }

    let iv_string = "0D74DB42A91077DE45AC000000000000".to_string();
    let mut iv = [0u8; 16];

    for i in (0..iv_string.len()).step_by(2) {
        iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
    }

    let output = "D1F0303482061111".to_string();

    let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));

    let ciphered_message = FheUint64::try_encrypt(0u64, &client_key).unwrap();

    let mut kreyvium = KreyviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);

    let trans_ciphered_message = kreyvium.trans_encrypt_64(ciphered_message);
    let ciphered_message = trans_ciphered_message.decrypt(&client_key);

    let hexadecimal = get_hexagonal_string_from_u64(vec![ciphered_message]);
    assert_eq!(output, hexadecimal);
}
|
||||
10
apps/trivium/src/lib.rs
Normal file
10
apps/trivium/src/lib.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
// Crate root: re-exports the Kreyvium and Trivium stream ciphers plus the
// transciphering trait; static_deque is an internal utility module.
mod static_deque;

mod kreyvium;
pub use kreyvium::{KreyviumStream, KreyviumStreamByte, KreyviumStreamShortint};

mod trivium;
pub use trivium::{TriviumStream, TriviumStreamByte, TriviumStreamShortint};

mod trans_ciphering;
pub use trans_ciphering::TransCiphering;
|
||||
5
apps/trivium/src/static_deque/mod.rs
Normal file
5
apps/trivium/src/static_deque/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
// Fixed-size deque utilities: StaticDeque is the generic ring buffer,
// StaticByteDeque adds bit-level access on top of it.
#[allow(clippy::module_inception)]
mod static_deque;
pub use static_deque::StaticDeque;
mod static_byte_deque;
pub use static_byte_deque::{StaticByteDeque, StaticByteDequeInput};
|
||||
141
apps/trivium/src/static_deque/static_byte_deque.rs
Normal file
141
apps/trivium/src/static_deque/static_byte_deque.rs
Normal file
@@ -0,0 +1,141 @@
|
||||
//! This module implements the StaticByteDeque struct: a deque of bytes. The idea
|
||||
//! is that this is a wrapper around StaticDeque, but StaticByteDeque has an additional
|
||||
//! functionality: it can construct the "intermediate" bytes, made of parts of other bytes.
|
||||
//! This is pretending to store bits, and allows accessing bits in chunks of 8 consecutive.
|
||||
|
||||
use crate::static_deque::StaticDeque;
|
||||
|
||||
use tfhe::FheUint8;
|
||||
|
||||
/// Internal trait specifying which operations are needed by StaticByteDeque
/// (shifts and bitwise-or are required to stitch bits from adjacent bytes).
pub trait StaticByteDequeInput<OpOutput>:
    Clone
    + std::ops::Shr<u8, Output = OpOutput>
    + std::ops::Shl<u8, Output = OpOutput>
    + std::ops::BitOr<Output = OpOutput>
{
}
// Blanket impls for clear bytes and FHE bytes, by value and by reference.
impl StaticByteDequeInput<u8> for u8 {}
impl StaticByteDequeInput<u8> for &u8 {}
impl StaticByteDequeInput<FheUint8> for FheUint8 {}
impl StaticByteDequeInput<FheUint8> for &FheUint8 {}
||||
|
||||
/// Here T must represent a type covering a byte, like u8 or FheUint8.
#[derive(Clone)]
pub struct StaticByteDeque<const N: usize, T> {
    // Underlying ring buffer of whole bytes; bit addressing is layered on top.
    deque: StaticDeque<N, T>,
}
|
||||
|
||||
impl<const N: usize, T> StaticByteDeque<N, T>
where
    T: StaticByteDequeInput<T>,
    for<'a> &'a T: StaticByteDequeInput<T>,
{
    /// Constructor always uses a fully initialized array, the first element of
    /// which is oldest, the last is newest
    pub fn new(_arr: [T; N]) -> Self {
        Self {
            deque: StaticDeque::<N, T>::new(_arr),
        }
    }

    /// Elements are pushed via a byte element (covering 8 underlying bits)
    pub fn push(&mut self, val: T) {
        self.deque.push(val)
    }

    /// computes n shift in a row
    pub fn n_shifts(&mut self, n: usize) {
        self.deque.n_shifts(n);
    }

    /// Getter for the internal memory
    #[allow(dead_code)]
    fn get_arr(&self) -> &[T; N] {
        self.deque.get_arr()
    }

    /// This returns a byte full of zeros, except maybe a one
    /// at the specified location, if it is present in the deque
    #[allow(dead_code)]
    fn bit(&self, i: usize) -> T
    where
        for<'a> &'a T: std::ops::BitAnd<u8, Output = T>,
    {
        // i is a bit index: byte i/8, bit i%8 within that byte.
        let byte: &T = &self.deque[i / 8];
        let bit_selector: u8 = 1u8 << (i % 8);
        byte & bit_selector
    }

    /// This function reconstructs an intermediate byte if necessary
    pub fn byte(&self, i: usize) -> T {
        // i is a bit index; when it is byte-aligned the stored byte is
        // returned directly, otherwise the result straddles two bytes.
        let byte: &T = &self.deque[i / 8];
        let bit_idx: u8 = (i % 8) as u8;

        if bit_idx == 0 {
            return byte.clone();
        }

        // NOTE(review): indexing i/8 + 1 panics (via StaticDeque::index) when
        // i/8 == N - 1 — callers presumably never request such i; confirm.
        let byte_next: &T = &self.deque[i / 8 + 1];
        (byte << bit_idx) | (byte_next >> (8 - bit_idx))
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::static_deque::StaticByteDeque;

    #[test]
    // Checks bit() and byte() addressing after a push: index 0 is the
    // youngest byte's LSB region, higher indices walk toward older bytes.
    fn byte_deque_test() {
        let mut deque = StaticByteDeque::<3, u8>::new([2, 64, 128]);
        deque.push(4);

        // Youngest: 4
        assert!(deque.bit(0) == 0);
        assert!(deque.bit(1) == 0);
        assert!(deque.bit(2) > 0);
        assert!(deque.bit(3) == 0);
        assert!(deque.bit(4) == 0);
        assert!(deque.bit(5) == 0);
        assert!(deque.bit(6) == 0);
        assert!(deque.bit(7) == 0);

        // second youngest: 128
        assert!(deque.bit(8) == 0);
        assert!(deque.bit(8 + 1) == 0);
        assert!(deque.bit(8 + 2) == 0);
        assert!(deque.bit(8 + 3) == 0);
        assert!(deque.bit(8 + 4) == 0);
        assert!(deque.bit(8 + 5) == 0);
        assert!(deque.bit(8 + 6) == 0);
        assert!(deque.bit(8 + 7) > 0);

        // oldest: 64
        assert!(deque.bit(16) == 0);
        assert!(deque.bit(16 + 1) == 0);
        assert!(deque.bit(16 + 2) == 0);
        assert!(deque.bit(16 + 3) == 0);
        assert!(deque.bit(16 + 4) == 0);
        assert!(deque.bit(16 + 5) == 0);
        assert!(deque.bit(16 + 6) > 0);
        assert!(deque.bit(16 + 7) == 0);

        // byte(i) stitches bits across adjacent stored bytes when i % 8 != 0.
        assert_eq!(deque.byte(0), 4u8);
        assert_eq!(deque.byte(1), 9u8);
        assert_eq!(deque.byte(2), 18u8);
        assert_eq!(deque.byte(3), 36u8);
        assert_eq!(deque.byte(4), 72u8);
        assert_eq!(deque.byte(5), 144u8);
        assert_eq!(deque.byte(6), 32u8);
        assert_eq!(deque.byte(7), 64u8);
        assert_eq!(deque.byte(8), 128u8);
        assert_eq!(deque.byte(9), 0u8);
        assert_eq!(deque.byte(10), 1u8);
        assert_eq!(deque.byte(11), 2u8);
        assert_eq!(deque.byte(12), 4u8);
        assert_eq!(deque.byte(13), 8u8);
        assert_eq!(deque.byte(14), 16u8);
        assert_eq!(deque.byte(15), 32u8);
        assert_eq!(deque.byte(16), 64u8);
    }
}
|
||||
135
apps/trivium/src/static_deque/static_deque.rs
Normal file
135
apps/trivium/src/static_deque/static_deque.rs
Normal file
@@ -0,0 +1,135 @@
|
||||
//! This module implements the StaticDeque struct: a deque utility whose size
|
||||
//! is known at compile time. Construction, push, and indexing are publicly
|
||||
//! available.
|
||||
|
||||
use core::ops::{Index, IndexMut};
|
||||
|
||||
/// StaticDeque: a struct implementing a deque whose size is known at compile time.
/// It has 2 members: the static array containing the data (never empty), and a cursor
/// equal to the index of the oldest element (and the next one to be overwritten).
#[derive(Clone)]
pub struct StaticDeque<const N: usize, T> {
    // Ring buffer storage.
    arr: [T; N],
    // Index of the oldest element == slot the next push overwrites.
    cursor: usize,
}
|
||||
|
||||
impl<const N: usize, T> StaticDeque<N, T> {
    /// Constructor always uses a fully initialized array, the first element of
    /// which is oldest, the last is newest
    pub fn new(_arr: [T; N]) -> Self {
        Self {
            arr: _arr,
            cursor: 0,
        }
    }

    /// Push a new element to the deque, overwriting the oldest at the same time.
    pub fn push(&mut self, val: T) {
        self.arr[self.cursor] = val;
        self.shift();
    }

    /// Shift: equivalent to pushing the oldest element
    pub fn shift(&mut self) {
        self.n_shifts(1);
    }

    /// computes n shift in a row
    pub fn n_shifts(&mut self, n: usize) {
        // Cursor advances modulo N; contents are untouched.
        self.cursor += n;
        self.cursor %= N;
    }

    /// Getter for the internal memory
    #[allow(dead_code)]
    pub fn get_arr(&self) -> &[T; N] {
        &self.arr
    }
}
|
||||
|
||||
/// Index trait for the StaticDeque: 0 is the youngest element, N-1 is the oldest,
/// and above N will panic.
impl<const N: usize, T> Index<usize> for StaticDeque<N, T> {
    type Output = T;

    /// 0 is youngest
    fn index(&self, i: usize) -> &T {
        if i >= N {
            panic!("Index {:?} too high for size {:?}", i, N);
        }
        // cursor points at the oldest slot, so the youngest is just before it;
        // the +N keeps the subtraction from underflowing on usize.
        &self.arr[(N + self.cursor - i - 1) % N]
    }
}
|
||||
/// IndexMut trait for the StaticDeque: 0 is the youngest element, N-1 is the oldest,
/// and above N will panic.
impl<const N: usize, T> IndexMut<usize> for StaticDeque<N, T> {
    /// 0 is youngest
    fn index_mut(&mut self, i: usize) -> &mut T {
        if i >= N {
            panic!("Index {:?} too high for size {:?}", i, N);
        }
        // Same youngest-first mapping as the immutable Index impl.
        &mut self.arr[(N + self.cursor - i - 1) % N]
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use crate::static_deque::StaticDeque;

    #[test]
    fn test_static_deque() {
        let mut deque = StaticDeque::new([1, 2, 3, 4, 5, 6]);

        (7..11).for_each(|v| deque.push(v));
        assert_eq!(*deque.get_arr(), [7, 8, 9, 10, 5, 6]);

        (11..15).for_each(|v| deque.push(v));
        assert_eq!(*deque.get_arr(), [13, 14, 9, 10, 11, 12]);

        // Logical view: index 0 is the youngest element, index 5 the oldest.
        let expected = [14, 13, 12, 11, 10, 9];
        for (i, &want) in expected.iter().enumerate() {
            assert_eq!(deque[i], want);
        }
    }

    #[test]
    fn test_static_deque_indexmut() {
        let mut deque = StaticDeque::new([1, 2, 3, 4, 5, 6]);

        (7..11).for_each(|v| deque.push(v));
        assert_eq!(*deque.get_arr(), [7, 8, 9, 10, 5, 6]);

        (11..15).for_each(|v| deque.push(v));
        assert_eq!(*deque.get_arr(), [13, 14, 9, 10, 11, 12]);

        // Overwrite the second-youngest element in place.
        deque[1] = 100;

        let expected = [14, 100, 12, 11, 10, 9];
        for (i, &want) in expected.iter().enumerate() {
            assert_eq!(deque[i], want);
        }
    }

    #[test]
    #[should_panic]
    fn test_static_deque_index_fail() {
        // Indexing at N (here 6) must panic.
        let deque = StaticDeque::new([1, 2, 3, 4, 5, 6]);
        let _ = deque[6];
    }
}
|
||||
118
apps/trivium/src/trans_ciphering/mod.rs
Normal file
118
apps/trivium/src/trans_ciphering/mod.rs
Normal file
@@ -0,0 +1,118 @@
|
||||
//! This module will contain extensions of some TriviumStream or KreyviumStream objects,
//! when trans ciphering is available to them.
|
||||
|
||||
use crate::{KreyviumStreamByte, KreyviumStreamShortint, TriviumStreamByte, TriviumStreamShortint};
|
||||
use tfhe::shortint::Ciphertext;
|
||||
|
||||
use tfhe::{set_server_key, unset_server_key, FheUint64, FheUint8, ServerKey};
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// Trait specifying the interface for trans ciphering a FheUint64 object. Since it is meant
/// to be used with stream ciphers, encryption and decryption are by default the same.
pub trait TransCiphering {
    /// XORs `cipher` with the next 64 bits of keystream, advancing the stream.
    fn trans_encrypt_64(&mut self, cipher: FheUint64) -> FheUint64;
    /// For a stream cipher, decryption is the same XOR as encryption
    /// (XOR is an involution), so this simply delegates.
    fn trans_decrypt_64(&mut self, cipher: FheUint64) -> FheUint64 {
        self.trans_encrypt_64(cipher)
    }
}
|
||||
|
||||
fn transcipher_from_fheu8_stream(
|
||||
stream: Vec<FheUint8>,
|
||||
cipher: FheUint64,
|
||||
fhe_server_key: &ServerKey,
|
||||
) -> FheUint64 {
|
||||
assert_eq!(stream.len(), 8);
|
||||
|
||||
set_server_key(fhe_server_key.clone());
|
||||
rayon::broadcast(|_| set_server_key(fhe_server_key.clone()));
|
||||
|
||||
let ret: FheUint64 = stream
|
||||
.into_par_iter()
|
||||
.enumerate()
|
||||
.map(|(i, x)| &cipher ^ &(FheUint64::cast_from(x) << (8 * (7 - i) as u8)))
|
||||
.reduce_with(|a, b| a | b)
|
||||
.unwrap();
|
||||
|
||||
unset_server_key();
|
||||
rayon::broadcast(|_| unset_server_key());
|
||||
|
||||
ret
|
||||
}
|
||||
|
||||
/// Repacks 64 single-bit shortint keystream ciphertexts into a FheUint64 and
/// XORs it with `cipher`.
///
/// The 64 bits are combined two by two (`b0 + 2*b1`) in the 1_1 parameter
/// space, key-switched into the high-level parameter space with `casting_key`,
/// then assembled into a FheUint64 via `try_from`.
fn transcipher_from_1_1_stream(
    stream: Vec<Ciphertext>,
    cipher: FheUint64,
    hl_server_key: &ServerKey,
    internal_server_key: &tfhe::shortint::ServerKey,
    casting_key: &tfhe::shortint::KeySwitchingKey,
) -> FheUint64 {
    assert_eq!(stream.len(), 64);

    // 32 pairs are produced in parallel. No server key needs to be installed
    // here: the shortint operations below take their keys explicitly.
    let pairs = (0..32)
        .into_par_iter()
        .map(|i| {
            // NOTE(review): byte_idx walks from the most significant byte
            // downwards (7 - i/4) while pair_idx walks the 4 bit-pairs inside
            // a byte; presumably this matches the 2-bit block ordering that
            // FheUint64::try_from expects -- confirm against the integer layout.
            let byte_idx = 7 - i / 4;
            let pair_idx = i % 4;

            let b0 = &stream[8 * byte_idx + 2 * pair_idx];
            let b1 = &stream[8 * byte_idx + 2 * pair_idx + 1];

            // Pack the pair as b0 + 2*b1, then cast to the HL parameter space.
            casting_key.cast(
                &internal_server_key
                    .unchecked_add(b0, &internal_server_key.unchecked_scalar_mul(b1, 2)),
            )
        })
        .collect::<Vec<_>>();

    // Only the final XOR runs through the high-level API and needs the
    // thread-local server key; it is installed for just that operation.
    set_server_key(hl_server_key.clone());
    let ret = &cipher ^ &FheUint64::try_from(pairs).unwrap();
    unset_server_key();
    ret
}
|
||||
|
||||
impl TransCiphering for TriviumStreamByte<FheUint8> {
    /// `TriviumStreamByte<FheUint8>`: since a full step outputs 8 bytes, these bytes
    /// are each shifted by a number in [0, 8), and XORed with the input cipher
    fn trans_encrypt_64(&mut self, cipher: FheUint64) -> FheUint64 {
        // Delegates to the shared byte-stream packer with this stream's key.
        transcipher_from_fheu8_stream(self.next_64(), cipher, self.get_server_key())
    }
}
|
||||
|
||||
impl TransCiphering for KreyviumStreamByte<FheUint8> {
    /// `KreyviumStreamByte<FheUint8>`: since a full step outputs 8 bytes, these bytes
    /// are each shifted by a number in [0, 8), and XORed with the input cipher
    fn trans_encrypt_64(&mut self, cipher: FheUint64) -> FheUint64 {
        // Same packing as the Trivium byte stream; only the keystream differs.
        transcipher_from_fheu8_stream(self.next_64(), cipher, self.get_server_key())
    }
}
|
||||
|
||||
impl TransCiphering for TriviumStreamShortint {
    /// TriviumStreamShortint: since a full step outputs 64 shortints, these bits
    /// are paired 2 by 2 in the HL parameter space and packed in a full word,
    /// and XORed with the input cipher
    fn trans_encrypt_64(&mut self, cipher: FheUint64) -> FheUint64 {
        // Delegates to the shared 1_1-stream packer with this stream's three keys.
        transcipher_from_1_1_stream(
            self.next_64(),
            cipher,
            self.get_hl_server_key(),
            self.get_internal_server_key(),
            self.get_casting_key(),
        )
    }
}
|
||||
|
||||
impl TransCiphering for KreyviumStreamShortint {
    /// KreyviumStreamShortint: since a full step outputs 64 shortints, these bits
    /// are paired 2 by 2 in the HL parameter space and packed in a full word,
    /// and XORed with the input cipher
    fn trans_encrypt_64(&mut self, cipher: FheUint64) -> FheUint64 {
        // Same packing as the Trivium shortint stream; only the keystream differs.
        transcipher_from_1_1_stream(
            self.next_64(),
            cipher,
            self.get_hl_server_key(),
            self.get_internal_server_key(),
            self.get_casting_key(),
        )
    }
}
|
||||
11
apps/trivium/src/trivium/mod.rs
Normal file
11
apps/trivium/src/trivium/mod.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
// Boolean implementation of Trivium (bool for clear data, FheBool for FHE).
mod trivium_bool;
pub use trivium_bool::TriviumStream;

// Byte-oriented implementation (u8 / FheUint8).
mod trivium_byte;
pub use trivium_byte::TriviumStreamByte;

// Shortint-based implementation (1 bit per shortint ciphertext).
mod trivium_shortint;
pub use trivium_shortint::TriviumStreamShortint;

#[cfg(test)]
mod test;
|
||||
412
apps/trivium/src/trivium/test.rs
Normal file
412
apps/trivium/src/trivium/test.rs
Normal file
@@ -0,0 +1,412 @@
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{generate_keys, ConfigBuilder, FheBool, FheUint64, FheUint8};
|
||||
|
||||
use crate::{TransCiphering, TriviumStream, TriviumStreamByte, TriviumStreamShortint};
|
||||
|
||||
// Values for these tests come from the github repo cantora/avr-crypto-lib, commit 2a5b018,
|
||||
// file testvectors/trivium-80.80.test-vectors
|
||||
|
||||
/// Converts an LSB-first bit stream into an uppercase hex string.
///
/// Each group of 8 bits is one byte, stored LSB-first (bit 0 is the byte's
/// least significant bit). The byte is printed high nibble first, so bits
/// 4..8 give the first hex character and bits 0..4 the second.
///
/// Panics if the stream length is not a multiple of 8.
fn get_hexadecimal_string_from_lsb_first_stream(a: Vec<bool>) -> String {
    assert!(a.len() % 8 == 0);
    let mut hexadecimal = String::with_capacity(a.len() / 4);
    for byte in a.chunks(8) {
        // Encoding is bytes in LSB order: within each nibble slice, index i
        // carries weight 2^i. This replaces the two hand-written 16-arm
        // matches (whose `_` arms were unreachable for exhaustive 4-bit input).
        for nibble in [&byte[4..8], &byte[0..4]] {
            let val = nibble
                .iter()
                .enumerate()
                .fold(0u32, |acc, (i, &bit)| acc | ((bit as u32) << i));
            hexadecimal.push(char::from_digit(val, 16).unwrap().to_ascii_uppercase());
        }
    }
    hexadecimal
}
|
||||
|
||||
/// Renders each byte as two uppercase hex characters, concatenated in order.
/// Panics unless the number of bytes is a multiple of 8 (the tests always
/// produce whole 64-bit words).
fn get_hexagonal_string_from_bytes(a: Vec<u8>) -> String {
    assert!(a.len() % 8 == 0);
    a.into_iter().map(|byte| format!("{:02X?}", byte)).collect()
}
|
||||
|
||||
/// Renders each u64 as 16 uppercase hex characters, concatenated in order.
fn get_hexagonal_string_from_u64(a: Vec<u64>) -> String {
    a.into_iter().map(|word| format!("{:016X?}", word)).collect()
}
|
||||
|
||||
#[test]
fn trivium_test_1() {
    // All-zero key and IV; expected vectors from cantora/avr-crypto-lib.
    let key = [false; 80];
    let iv = [false; 80];
    let output_0_63 = "FBE0BF265859051B517A2E4E239FC97F563203161907CF2DE7A8790FA1B2E9CDF75292030268B7382B4C1A759AA2599A285549986E74805903801A4CB5A5D4F2".to_string();
    let output_192_255 = "0F1BE95091B8EA857B062AD52BADF47784AC6D9B2E3F85A9D79995043302F0FDF8B76E5BC8B7B4F0AA46CD20DDA04FDD197BC5E1635496828F2DBFB23F6BD5D0".to_string();
    let output_256_319 = "80F9075437BAC73F696D0ABE3972F5FCE2192E5FCC13C0CB77D0ABA09126838D31A2D38A2087C46304C8A63B54109F679B0B1BC71E72A58D6DD3E0A3FF890D4A".to_string();
    let output_448_511 = "68450EB0910A98EF1853E0FC1BED8AB6BB08DF5F167D34008C2A85284D4B886DD56883EE92BF18E69121670B4C81A5689C9B0538373D22EB923A28A2DB44C0EB".to_string();

    let mut trivium = TriviumStream::<bool>::new(key, iv);

    // Draw 512 bytes worth of keystream bits, one at a time.
    let stream: Vec<bool> = (0..512 * 8).map(|_| trivium.next_bool()).collect();

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(stream);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
    assert_eq!(output_192_255, hexadecimal[192 * 2..256 * 2]);
    assert_eq!(output_256_319, hexadecimal[256 * 2..320 * 2]);
    assert_eq!(output_448_511, hexadecimal[448 * 2..512 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_2() {
    // Single key bit set (key[7]); expected vectors from cantora/avr-crypto-lib.
    let mut key = [false; 80];
    let iv = [false; 80];
    key[7] = true;

    let output_0_63 = "38EB86FF730D7A9CAF8DF13A4420540DBB7B651464C87501552041C249F29A64D2FBF515610921EBE06C8F92CECF7F8098FF20CCCC6A62B97BE8EF7454FC80F9".to_string();
    let output_192_255 = "EAF2625D411F61E41F6BAEEDDD5FE202600BD472F6C9CD1E9134A745D900EF6C023E4486538F09930CFD37157C0EB57C3EF6C954C42E707D52B743AD83CFF297".to_string();
    let output_256_319 = "9A203CF7B2F3F09C43D188AA13A5A2021EE998C42F777E9B67C3FA221A0AA1B041AA9E86BC2F5C52AFF11F7D9EE480CB1187B20EB46D582743A52D7CD080A24A".to_string();
    let output_448_511 = "EBF14772061C210843C18CEA2D2A275AE02FCB18E5D7942455FF77524E8A4CA51E369A847D1AEEFB9002FCD02342983CEAFA9D487CC2032B10192CD416310FA4".to_string();

    let mut trivium = TriviumStream::<bool>::new(key, iv);

    let stream: Vec<bool> = (0..512 * 8).map(|_| trivium.next_bool()).collect();

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(stream);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
    assert_eq!(output_192_255, hexadecimal[192 * 2..256 * 2]);
    assert_eq!(output_256_319, hexadecimal[256 * 2..320 * 2]);
    assert_eq!(output_448_511, hexadecimal[448 * 2..512 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_3() {
    // Single IV bit set (iv[7]); expected vectors from cantora/avr-crypto-lib.
    let key = [false; 80];
    let mut iv = [false; 80];
    iv[7] = true;

    let output_0_63 = "F8901736640549E3BA7D42EA2D07B9F49233C18D773008BD755585B1A8CBAB86C1E9A9B91F1AD33483FD6EE3696D659C9374260456A36AAE11F033A519CBD5D7".to_string();
    let output_192_255 = "87423582AF64475C3A9C092E32A53C5FE07D35B4C9CA288A89A43DEF3913EA9237CA43342F3F8E83AD3A5C38D463516F94E3724455656A36279E3E924D442F06".to_string();
    let output_256_319 = "D94389A90E6F3BF2BB4C8B057339AAD8AA2FEA238C29FCAC0D1FF1CB2535A07058BA995DD44CFC54CCEC54A5405B944C532D74E50EA370CDF1BA1CBAE93FC0B5".to_string();
    let output_448_511 = "4844151714E56A3A2BBFBA426A1D60F9A4F265210A91EC29259AE2035234091C49FFB1893FA102D425C57C39EB4916F6D148DC83EBF7DE51EEB9ABFE045FB282".to_string();

    let mut trivium = TriviumStream::<bool>::new(key, iv);

    let stream: Vec<bool> = (0..512 * 8).map(|_| trivium.next_bool()).collect();

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(stream);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
    assert_eq!(output_192_255, hexadecimal[192 * 2..256 * 2]);
    assert_eq!(output_256_319, hexadecimal[256 * 2..320 * 2]);
    assert_eq!(output_448_511, hexadecimal[448 * 2..512 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_4() {
    // Parses a 20-hex-digit string into 80 bits, LSB-first within each byte.
    // Factors out the key/IV parsing loop that was duplicated verbatim.
    fn hex_to_bits(s: &str) -> [bool; 80] {
        let mut bits = [false; 80];
        for (byte_idx, chunk) in s.as_bytes().chunks(2).enumerate() {
            let val = u8::from_str_radix(std::str::from_utf8(chunk).unwrap(), 16).unwrap();
            for j in 0..8 {
                bits[8 * byte_idx + j] = (val >> j) & 1 == 1;
            }
        }
        bits
    }

    let key = hex_to_bits("0053A6F94C9FF24598EB");
    let iv = hex_to_bits("0D74DB42A91077DE45AC");

    let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();
    let output_65472_65535 = "C04C24A6938C8AF8A491D5E481271E0E601338F01067A86A795CA493AA4FF265619B8D448B706B7C88EE8395FC79E5B51AB40245BBF7773AE67DF86FCFB71F30".to_string();
    let output_65536_65599 = "011A0D7EC32FA102C66C164CFCB189AED9F6982E8C7370A6A37414781192CEB155C534C1C8C9E53FDEADF2D3D0577DAD3A8EB2F6E5265F1E831C86844670BC69".to_string();
    let output_131008_131071 = "48107374A9CE3AAF78221AE77789247CF6896A249ED75DCE0CF2D30EB9D889A0C61C9F480E5C07381DED9FAB2AD54333E82C89BA92E6E47FD828F1A66A8656E0".to_string();

    let mut trivium = TriviumStream::<bool>::new(key, iv);

    // 128 KiB of keystream, checked at four windows of the stream.
    let mut vec = Vec::<bool>::with_capacity(131072 * 8);
    while vec.len() < 131072 * 8 {
        vec.push(trivium.next_bool());
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
    assert_eq!(output_65472_65535, hexadecimal[65472 * 2..65536 * 2]);
    assert_eq!(output_65536_65599, hexadecimal[65536 * 2..65600 * 2]);
    assert_eq!(output_131008_131071, hexadecimal[131008 * 2..131072 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_clear_byte() {
    // Same vectors as trivium_test_4, exercised through the byte-oriented API.
    let key_string = "0053A6F94C9FF24598EB".to_string();
    let mut key = [0u8; 10];

    for (idx, chunk) in key_string.as_bytes().chunks(2).enumerate() {
        key[idx] = u8::from_str_radix(std::str::from_utf8(chunk).unwrap(), 16).unwrap();
    }

    let iv_string = "0D74DB42A91077DE45AC".to_string();
    let mut iv = [0u8; 10];

    for (idx, chunk) in iv_string.as_bytes().chunks(2).enumerate() {
        iv[idx] = u8::from_str_radix(std::str::from_utf8(chunk).unwrap(), 16).unwrap();
    }

    let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();
    let output_65472_65535 = "C04C24A6938C8AF8A491D5E481271E0E601338F01067A86A795CA493AA4FF265619B8D448B706B7C88EE8395FC79E5B51AB40245BBF7773AE67DF86FCFB71F30".to_string();
    let output_65536_65599 = "011A0D7EC32FA102C66C164CFCB189AED9F6982E8C7370A6A37414781192CEB155C534C1C8C9E53FDEADF2D3D0577DAD3A8EB2F6E5265F1E831C86844670BC69".to_string();
    let output_131008_131071 = "48107374A9CE3AAF78221AE77789247CF6896A249ED75DCE0CF2D30EB9D889A0C61C9F480E5C07381DED9FAB2AD54333E82C89BA92E6E47FD828F1A66A8656E0".to_string();

    let mut trivium = TriviumStreamByte::<u8>::new(key, iv);

    let mut stream = Vec::<u8>::with_capacity(131072);
    while stream.len() < 131072 {
        stream.extend(trivium.next_64());
    }

    let hexadecimal = get_hexagonal_string_from_bytes(stream);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
    assert_eq!(output_65472_65535, hexadecimal[65472 * 2..65536 * 2]);
    assert_eq!(output_65536_65599, hexadecimal[65536 * 2..65600 * 2]);
    assert_eq!(output_131008_131071, hexadecimal[131008 * 2..131072 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_fhe_long() {
    // Parses a 20-hex-digit string into 80 bits, LSB-first within each byte.
    // Factors out the key/IV parsing loop that was duplicated verbatim.
    fn hex_to_bits(s: &str) -> [bool; 80] {
        let mut bits = [false; 80];
        for (byte_idx, chunk) in s.as_bytes().chunks(2).enumerate() {
            let val = u8::from_str_radix(std::str::from_utf8(chunk).unwrap(), 16).unwrap();
            for j in 0..8 {
                bits[8 * byte_idx + j] = (val >> j) & 1 == 1;
            }
        }
        bits
    }

    let config = ConfigBuilder::all_disabled().enable_default_bool().build();
    let (client_key, server_key) = generate_keys(config);

    let key = hex_to_bits("0053A6F94C9FF24598EB");
    let iv = hex_to_bits("0D74DB42A91077DE45AC");

    let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();

    // The key is encrypted bit by bit; the IV stays in the clear.
    let cipher_key = key.map(|x| FheBool::encrypt(x, &client_key));

    let mut trivium = TriviumStream::<FheBool>::new(cipher_key, iv, &server_key);

    let mut vec = Vec::<bool>::with_capacity(64 * 8);
    while vec.len() < 64 * 8 {
        let cipher_outputs = trivium.next_64();
        for c in cipher_outputs {
            vec.push(c.decrypt(&client_key))
        }
    }

    let hexadecimal = get_hexadecimal_string_from_lsb_first_stream(vec);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_fhe_byte_long() {
    let config = ConfigBuilder::all_disabled()
        .enable_default_integers()
        .build();
    let (client_key, server_key) = generate_keys(config);

    // Same vectors as the clear-byte test, exercised under FHE.
    let key_string = "0053A6F94C9FF24598EB".to_string();
    let mut key = [0u8; 10];

    for (idx, chunk) in key_string.as_bytes().chunks(2).enumerate() {
        key[idx] = u8::from_str_radix(std::str::from_utf8(chunk).unwrap(), 16).unwrap();
    }

    let iv_string = "0D74DB42A91077DE45AC".to_string();
    let mut iv = [0u8; 10];

    for (idx, chunk) in iv_string.as_bytes().chunks(2).enumerate() {
        iv[idx] = u8::from_str_radix(std::str::from_utf8(chunk).unwrap(), 16).unwrap();
    }

    let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();

    let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));

    let mut trivium = TriviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);

    let mut stream = Vec::<u8>::with_capacity(64);
    while stream.len() < 64 {
        for c in trivium.next_64() {
            stream.push(c.decrypt(&client_key))
        }
    }

    let hexadecimal = get_hexagonal_string_from_bytes(stream);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
}
|
||||
|
||||
#[test]
fn trivium_test_fhe_byte_transciphering_long() {
    let config = ConfigBuilder::all_disabled()
        .enable_default_integers()
        .build();
    let (client_key, server_key) = generate_keys(config);

    // Parse the key as 10 clear bytes (two hex chars per byte).
    let key_string = "0053A6F94C9FF24598EB".to_string();
    let mut key = [0u8; 10];

    for i in (0..key_string.len()).step_by(2) {
        key[i >> 1] = u8::from_str_radix(&key_string[i..i + 2], 16).unwrap();
    }

    // Parse the IV the same way; it stays in the clear.
    let iv_string = "0D74DB42A91077DE45AC".to_string();
    let mut iv = [0u8; 10];

    for i in (0..iv_string.len()).step_by(2) {
        iv[i >> 1] = u8::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
    }

    let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();

    let cipher_key = key.map(|x| FheUint8::encrypt(x, &client_key));

    // Trans-ciphering encryptions of 0 makes the output equal the raw
    // keystream, so it can be compared to the reference vector.
    // NOTE(review): 9 ciphertexts are allocated but only 8 are popped below;
    // presumably one is a spare -- confirm whether the extra element is needed.
    let mut ciphered_message = vec![FheUint64::try_encrypt(0u64, &client_key).unwrap(); 9];

    let mut trivium = TriviumStreamByte::<FheUint8>::new(cipher_key, iv, &server_key);

    let mut vec = Vec::<u64>::with_capacity(8);
    while vec.len() < 8 {
        let trans_ciphered_message = trivium.trans_encrypt_64(ciphered_message.pop().unwrap());
        vec.push(trans_ciphered_message.decrypt(&client_key));
    }

    let hexadecimal = get_hexagonal_string_from_u64(vec);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
}
|
||||
|
||||
use tfhe::shortint::prelude::*;
|
||||
|
||||
#[test]
fn trivium_test_shortint_long() {
    let config = ConfigBuilder::all_disabled()
        .enable_default_integers()
        .build();
    let (hl_client_key, hl_server_key) = generate_keys(config);
    // Extract the shortint keys underlying the high-level keys; the casting
    // key below switches ciphertexts from the 1_1 space into this space.
    let underlying_ck: tfhe::shortint::ClientKey = (*hl_client_key.as_ref()).clone().into();
    let underlying_sk: tfhe::shortint::ServerKey = (*hl_server_key.as_ref()).clone().into();

    // Dedicated 1-bit-message key pair used to run the Trivium registers.
    let (client_key, server_key): (ClientKey, ServerKey) = gen_keys(PARAM_MESSAGE_1_CARRY_1_KS_PBS);

    // Key-switching key from the 1_1 parameter space to the HL (2_2) space.
    let ksk = KeySwitchingKey::new(
        (&client_key, &server_key),
        (&underlying_ck, &underlying_sk),
        PARAM_KEYSWITCH_1_1_KS_PBS_TO_2_2_KS_PBS,
    );

    // Parse the key into 80 bits (one u64 per bit), LSB-first within each byte.
    let key_string = "0053A6F94C9FF24598EB".to_string();
    let mut key = [0; 80];

    for i in (0..key_string.len()).step_by(2) {
        let mut val = u64::from_str_radix(&key_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            key[8 * (i >> 1) + j] = val % 2;
            val >>= 1;
        }
    }

    // Same bit-decomposition for the IV, which stays in the clear.
    let iv_string = "0D74DB42A91077DE45AC".to_string();
    let mut iv = [0; 80];

    for i in (0..iv_string.len()).step_by(2) {
        let mut val = u64::from_str_radix(&iv_string[i..i + 2], 16).unwrap();
        for j in 0..8 {
            iv[8 * (i >> 1) + j] = val % 2;
            val >>= 1;
        }
    }
    let output_0_63 = "F4CD954A717F26A7D6930830C4E7CF0819F80E03F25F342C64ADC66ABA7F8A8E6EAA49F23632AE3CD41A7BD290A0132F81C6D4043B6E397D7388F3A03B5FE358".to_string();

    // Each key bit is encrypted with the 1_1 shortint client key.
    let cipher_key = key.map(|x| client_key.encrypt(x));

    // Trans-ciphering encryptions of 0 yields the raw keystream.
    // NOTE(review): 9 ciphertexts allocated, only 8 popped -- same spare as in
    // the byte transciphering test; confirm whether the extra one is needed.
    let mut ciphered_message = vec![FheUint64::try_encrypt(0u64, &hl_client_key).unwrap(); 9];

    let mut trivium = TriviumStreamShortint::new(cipher_key, iv, server_key, ksk, hl_server_key);

    let mut vec = Vec::<u64>::with_capacity(8);
    while vec.len() < 8 {
        let trans_ciphered_message = trivium.trans_encrypt_64(ciphered_message.pop().unwrap());
        vec.push(trans_ciphered_message.decrypt(&hl_client_key));
    }

    let hexadecimal = get_hexagonal_string_from_u64(vec);
    assert_eq!(output_0_63, hexadecimal[0..64 * 2]);
}
|
||||
225
apps/trivium/src/trivium/trivium_bool.rs
Normal file
225
apps/trivium/src/trivium/trivium_bool.rs
Normal file
@@ -0,0 +1,225 @@
|
||||
//! This module implements the Trivium stream cipher, using booleans or FheBool
|
||||
//! for the representation of the inner bits.
|
||||
|
||||
use crate::static_deque::StaticDeque;
|
||||
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{set_server_key, unset_server_key, FheBool, ServerKey};
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// Internal trait specifying which operations are necessary for TriviumStream generic type
pub trait TriviumBoolInput<OpOutput>:
    Sized
    + Clone
    + std::ops::BitXor<Output = OpOutput>
    + std::ops::BitAnd<Output = OpOutput>
    + std::ops::Not<Output = OpOutput>
{
}
// Implemented for both owned values and references, so the register-update
// code can operate on `&T` without cloning each element.
impl TriviumBoolInput<bool> for bool {}
impl TriviumBoolInput<bool> for &bool {}
impl TriviumBoolInput<FheBool> for FheBool {}
impl TriviumBoolInput<FheBool> for &FheBool {}
|
||||
|
||||
/// TriviumStream: a struct implementing the Trivium stream cipher, using T for the internal
/// representation of bits (bool or FheBool). To be able to compute FHE operations, it also owns
/// an Option for a ServerKey.
pub struct TriviumStream<T> {
    // The three Trivium shift registers (93 + 84 + 111 = 288 state bits).
    a: StaticDeque<93, T>,
    b: StaticDeque<84, T>,
    c: StaticDeque<111, T>,
    // Some(key) only in the FheBool case; None for the clear bool cipher.
    fhe_key: Option<ServerKey>,
}
|
||||
|
||||
impl TriviumStream<bool> {
|
||||
/// Constructor for `TriviumStream<bool>`: arguments are the secret key and the input vector.
|
||||
/// Outputs a TriviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(key: [bool; 80], iv: [bool; 80]) -> TriviumStream<bool> {
|
||||
// Initialization of Trivium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_register = [false; 93];
|
||||
let mut b_register = [false; 84];
|
||||
let mut c_register = [false; 111];
|
||||
|
||||
for i in 0..80 {
|
||||
a_register[93 - 80 + i] = key[i];
|
||||
b_register[84 - 80 + i] = iv[i];
|
||||
}
|
||||
|
||||
c_register[0] = true;
|
||||
c_register[1] = true;
|
||||
c_register[2] = true;
|
||||
|
||||
TriviumStream::<bool>::new_from_registers(a_register, b_register, c_register, None)
|
||||
}
|
||||
}
|
||||
|
||||
impl TriviumStream<FheBool> {
    /// Constructor for `TriviumStream<FheBool>`: arguments are the encrypted secret key and input
    /// vector, and the FHE server key.
    /// Outputs a TriviumStream object already initialized (1152 steps have been run before
    /// returning)
    pub fn new(key: [FheBool; 80], iv: [bool; 80], sk: &ServerKey) -> TriviumStream<FheBool> {
        // The server key must be installed on this thread before any trivial
        // encryption below can run.
        set_server_key(sk.clone());

        // Initialization of Trivium registers: a has the secret key, b the input vector,
        // and c a few ones. All filler bits are trivial (noiseless) encryptions.
        let mut a_register = [false; 93].map(FheBool::encrypt_trivial);
        let mut b_register = [false; 84].map(FheBool::encrypt_trivial);
        let mut c_register = [false; 111].map(FheBool::encrypt_trivial);

        for i in 0..80 {
            a_register[93 - 80 + i] = key[i].clone();
            b_register[84 - 80 + i] = FheBool::encrypt_trivial(iv[i]);
        }

        c_register[0] = FheBool::try_encrypt_trivial(true).unwrap();
        c_register[1] = FheBool::try_encrypt_trivial(true).unwrap();
        c_register[2] = FheBool::try_encrypt_trivial(true).unwrap();

        // Remove the thread-local key again: the generic code re-installs it
        // (and broadcasts to rayon workers) around each batch of operations.
        unset_server_key();
        TriviumStream::<FheBool>::new_from_registers(
            a_register,
            b_register,
            c_register,
            Some(sk.clone()),
        )
    }
}
|
||||
|
||||
impl<T> TriviumStream<T>
where
    T: TriviumBoolInput<T> + std::marker::Send + std::marker::Sync,
    for<'a> &'a T: TriviumBoolInput<T>,
{
    /// Internal generic constructor: arguments are already prepared registers, and an optional FHE
    /// server key
    fn new_from_registers(
        a_register: [T; 93],
        b_register: [T; 84],
        c_register: [T; 111],
        key: Option<ServerKey>,
    ) -> Self {
        let mut ret = Self {
            a: StaticDeque::<93, T>::new(a_register),
            b: StaticDeque::<84, T>::new(b_register),
            c: StaticDeque::<111, T>::new(c_register),
            fhe_key: key,
        };
        // Warm-up rounds run before the object is handed to the caller.
        ret.init();
        ret
    }

    /// The specification of Trivium includes running 1152 (= 18*64) unused steps to mix up the
    /// registers, before starting the proper stream
    fn init(&mut self) {
        for _ in 0..18 {
            self.next_64();
        }
    }

    /// Computes one turn of the stream, updating registers and outputting the new bit.
    pub fn next_bool(&mut self) -> T {
        // Install the server key on this thread for the FHE case.
        // NOTE(review): unlike next_64, the key is not unset afterwards and is
        // not broadcast to rayon workers even though get_output_and_values
        // uses rayon::join -- confirm whether worker threads inherit the key.
        match &self.fhe_key {
            Some(sk) => set_server_key(sk.clone()),
            None => (),
        };

        let [o, a, b, c] = self.get_output_and_values(0);

        // Push the three feedback bits, aging out the oldest state bits.
        self.a.push(a);
        self.b.push(b);
        self.c.push(c);

        o
    }

    /// Computes a potential future step of Trivium, n terms in the future. This does not update
    /// registers, but rather returns with the output, the three values that will be used to
    /// update the registers, when the time is right. This function is meant to be used in
    /// parallel.
    fn get_output_and_values(&self, n: usize) -> [T; 4] {
        // Register taps are only valid up to 64 steps ahead (index 65 - n
        // must stay non-negative), hence the bound.
        assert!(n < 65);

        // First wave: the three XOR "t" terms and the three AND terms, laid
        // out as a balanced rayon::join tree so the six operations can run
        // concurrently in the FHE case.
        let (((temp_a, temp_b), (temp_c, a_and)), (b_and, c_and)) = rayon::join(
            || {
                rayon::join(
                    || {
                        rayon::join(
                            || &self.a[65 - n] ^ &self.a[92 - n],
                            || &self.b[68 - n] ^ &self.b[83 - n],
                        )
                    },
                    || {
                        rayon::join(
                            || &self.c[65 - n] ^ &self.c[110 - n],
                            || &self.a[91 - n] & &self.a[90 - n],
                        )
                    },
                )
            },
            || {
                rayon::join(
                    || &self.b[82 - n] & &self.b[81 - n],
                    || &self.c[109 - n] & &self.c[108 - n],
                )
            },
        );

        // Second wave: combine the partial terms into the output bit `o` and
        // the three register feedback bits `a`, `b`, `c`.
        let ((o, a), (b, c)) = rayon::join(
            || {
                rayon::join(
                    || &(&temp_a ^ &temp_b) ^ &temp_c,
                    || &temp_c ^ &(&c_and ^ &self.a[68 - n]),
                )
            },
            || {
                rayon::join(
                    || &temp_a ^ &(&a_and ^ &self.b[77 - n]),
                    || &temp_b ^ &(&b_and ^ &self.c[86 - n]),
                )
            },
        );

        [o, a, b, c]
    }

    /// This calls `get_output_and_values` in parallel 64 times, and stores all results in a Vec.
    fn get_64_output_and_values(&self) -> Vec<[T; 4]> {
        // Reversed so that popping the Vec in next_64 yields steps in
        // chronological order (step 0 first).
        (0..64)
            .into_par_iter()
            .map(|x| self.get_output_and_values(x))
            .rev()
            .collect()
    }

    /// Computes 64 turns of the stream, outputting the 64 bits all at once in a
    /// Vec (first value is oldest, last is newest)
    pub fn next_64(&mut self) -> Vec<T> {
        // FHE case: every rayon worker needs its own thread-local server key
        // before the parallel computation starts.
        match &self.fhe_key {
            Some(sk) => {
                rayon::broadcast(|_| set_server_key(sk.clone()));
            }
            None => (),
        }
        let mut values = self.get_64_output_and_values();
        // Drop the keys from the worker threads once the batch is done.
        match &self.fhe_key {
            Some(_) => {
                rayon::broadcast(|_| unset_server_key());
            }
            None => (),
        }

        let mut ret = Vec::<T>::with_capacity(64);

        // Apply the 64 precomputed register updates in chronological order
        // (the Vec was built reversed, so pop() yields step 0 first).
        while let Some([o, a, b, c]) = values.pop() {
            ret.push(o);
            self.a.push(a);
            self.b.push(b);
            self.c.push(c);
        }
        ret
    }
}
|
||||
241
apps/trivium/src/trivium/trivium_byte.rs
Normal file
241
apps/trivium/src/trivium/trivium_byte.rs
Normal file
@@ -0,0 +1,241 @@
|
||||
//! This module implements the Trivium stream cipher, using u8 or FheUint8
|
||||
//! for the representation of the inner bits.
|
||||
|
||||
use crate::static_deque::{StaticByteDeque, StaticByteDequeInput};
|
||||
|
||||
use tfhe::prelude::*;
|
||||
use tfhe::{set_server_key, unset_server_key, FheUint8, ServerKey};
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// Internal trait specifying which operations are necessary for TriviumStreamByte generic type
|
||||
pub trait TriviumByteInput<OpOutput>:
|
||||
Sized
|
||||
+ Clone
|
||||
+ Send
|
||||
+ Sync
|
||||
+ StaticByteDequeInput<OpOutput>
|
||||
+ std::ops::BitXor<Output = OpOutput>
|
||||
+ std::ops::BitAnd<Output = OpOutput>
|
||||
+ std::ops::Shr<u8, Output = OpOutput>
|
||||
+ std::ops::Shl<u8, Output = OpOutput>
|
||||
+ std::ops::Add<Output = OpOutput>
|
||||
{
|
||||
}
|
||||
impl TriviumByteInput<u8> for u8 {}
|
||||
impl TriviumByteInput<u8> for &u8 {}
|
||||
impl TriviumByteInput<FheUint8> for FheUint8 {}
|
||||
impl TriviumByteInput<FheUint8> for &FheUint8 {}
|
||||
|
||||
/// TriviumStreamByte: a struct implementing the Trivium stream cipher, using T for the internal
|
||||
/// representation of bits (u8 or FheUint8). To be able to compute FHE operations, it also owns
|
||||
/// an Option for a ServerKey.
|
||||
/// Since the original Trivium registers' sizes are not a multiple of 8, these registers (which
|
||||
/// store byte-like objects) have a size that is the eighth of the closest multiple of 8 above the
|
||||
/// originals' sizes.
|
||||
pub struct TriviumStreamByte<T> {
|
||||
a_byte: StaticByteDeque<12, T>,
|
||||
b_byte: StaticByteDeque<11, T>,
|
||||
c_byte: StaticByteDeque<14, T>,
|
||||
fhe_key: Option<ServerKey>,
|
||||
}
|
||||
|
||||
impl TriviumStreamByte<u8> {
|
||||
/// Constructor for `TriviumStreamByte<u8>`: arguments are the secret key and the input vector.
|
||||
/// Outputs a TriviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(key: [u8; 10], iv: [u8; 10]) -> TriviumStreamByte<u8> {
|
||||
// Initialization of Trivium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_byte_reg = [0u8; 12];
|
||||
let mut b_byte_reg = [0u8; 11];
|
||||
let mut c_byte_reg = [0u8; 14];
|
||||
|
||||
for i in 0..10 {
|
||||
a_byte_reg[12 - 10 + i] = key[i];
|
||||
b_byte_reg[11 - 10 + i] = iv[i];
|
||||
}
|
||||
|
||||
// Magic number 14, aka 00001110: this represents the 3 ones at the beginning of the c
|
||||
// registers, with additional zeros to make the register's size a multiple of 8.
|
||||
c_byte_reg[0] = 14;
|
||||
|
||||
let mut ret =
|
||||
TriviumStreamByte::<u8>::new_from_registers(a_byte_reg, b_byte_reg, c_byte_reg, None);
|
||||
ret.init();
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl TriviumStreamByte<FheUint8> {
|
||||
/// Constructor for `TriviumStream<FheUint8>`: arguments are the encrypted secret key and input
|
||||
/// vector, and the FHE server key.
|
||||
/// Outputs a TriviumStream object already initialized (1152 steps have been run before
|
||||
/// returning)
|
||||
pub fn new(
|
||||
key: [FheUint8; 10],
|
||||
iv: [u8; 10],
|
||||
server_key: &ServerKey,
|
||||
) -> TriviumStreamByte<FheUint8> {
|
||||
set_server_key(server_key.clone());
|
||||
|
||||
// Initialization of Trivium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_byte_reg = [0u8; 12].map(FheUint8::encrypt_trivial);
|
||||
let mut b_byte_reg = [0u8; 11].map(FheUint8::encrypt_trivial);
|
||||
let mut c_byte_reg = [0u8; 14].map(FheUint8::encrypt_trivial);
|
||||
|
||||
for i in 0..10 {
|
||||
a_byte_reg[12 - 10 + i] = key[i].clone();
|
||||
b_byte_reg[11 - 10 + i] = FheUint8::encrypt_trivial(iv[i]);
|
||||
}
|
||||
|
||||
// Magic number 14, aka 00001110: this represents the 3 ones at the beginning of the c
|
||||
// registers, with additional zeros to make the register's size a multiple of 8.
|
||||
c_byte_reg[0] = FheUint8::encrypt_trivial(14u8);
|
||||
|
||||
unset_server_key();
|
||||
let mut ret = TriviumStreamByte::<FheUint8>::new_from_registers(
|
||||
a_byte_reg,
|
||||
b_byte_reg,
|
||||
c_byte_reg,
|
||||
Some(server_key.clone()),
|
||||
);
|
||||
ret.init();
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> TriviumStreamByte<T>
|
||||
where
|
||||
T: TriviumByteInput<T> + Send,
|
||||
for<'a> &'a T: TriviumByteInput<T>,
|
||||
{
|
||||
/// Internal generic constructor: arguments are already prepared registers, and an optional FHE
|
||||
/// server key
|
||||
fn new_from_registers(
|
||||
a_register: [T; 12],
|
||||
b_register: [T; 11],
|
||||
c_register: [T; 14],
|
||||
sk: Option<ServerKey>,
|
||||
) -> Self {
|
||||
Self {
|
||||
a_byte: StaticByteDeque::<12, T>::new(a_register),
|
||||
b_byte: StaticByteDeque::<11, T>::new(b_register),
|
||||
c_byte: StaticByteDeque::<14, T>::new(c_register),
|
||||
fhe_key: sk,
|
||||
}
|
||||
}
|
||||
|
||||
/// The specification of Trivium includes running 1152 (= 18*64) unused steps to mix up the
|
||||
/// registers, before starting the proper stream
|
||||
fn init(&mut self) {
|
||||
for _ in 0..18 {
|
||||
self.next_64();
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes 8 potential future step of Trivium, b*8 terms in the future. This does not update
|
||||
/// registers, but rather returns with the output, the three values that will be used to
|
||||
/// update the registers, when the time is right. This function is meant to be used in
|
||||
/// parallel.
|
||||
fn get_output_and_values(&self, b: usize) -> [T; 4] {
|
||||
let n = b * 8 + 7;
|
||||
assert!(n < 65);
|
||||
|
||||
let ((a1, a2, a3, a4, a5), ((b1, b2, b3, b4, b5), (c1, c2, c3, c4, c5))) = rayon::join(
|
||||
|| Self::get_bytes(&self.a_byte, [91 - n, 90 - n, 68 - n, 65 - n, 92 - n]),
|
||||
|| {
|
||||
rayon::join(
|
||||
|| Self::get_bytes(&self.b_byte, [82 - n, 81 - n, 77 - n, 68 - n, 83 - n]),
|
||||
|| Self::get_bytes(&self.c_byte, [109 - n, 108 - n, 86 - n, 65 - n, 110 - n]),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
let (((temp_a, temp_b), (temp_c, a_and)), (b_and, c_and)) = rayon::join(
|
||||
|| {
|
||||
rayon::join(
|
||||
|| rayon::join(|| a4 ^ a5, || b4 ^ b5),
|
||||
|| rayon::join(|| c4 ^ c5, || a1 & a2),
|
||||
)
|
||||
},
|
||||
|| rayon::join(|| b1 & b2, || c1 & c2),
|
||||
);
|
||||
|
||||
let (temp_a_2, temp_b_2, temp_c_2) = (temp_a.clone(), temp_b.clone(), temp_c.clone());
|
||||
|
||||
let ((o, a), (b, c)) = rayon::join(
|
||||
|| {
|
||||
rayon::join(
|
||||
|| (temp_a_2 ^ temp_b_2) ^ temp_c_2,
|
||||
|| temp_c ^ ((c_and) ^ a3),
|
||||
)
|
||||
},
|
||||
|| rayon::join(|| temp_a ^ (a_and ^ b3), || temp_b ^ (b_and ^ c3)),
|
||||
);
|
||||
|
||||
[o, a, b, c]
|
||||
}
|
||||
|
||||
/// This calls `get_output_and_values` in parallel 8 times, and stores all results in a Vec.
|
||||
fn get_64_output_and_values(&self) -> Vec<[T; 4]> {
|
||||
(0..8)
|
||||
.into_par_iter()
|
||||
.map(|i| self.get_output_and_values(i))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Computes 64 turns of the stream, outputting the 64 bits (in 8 bytes) all at once in a
|
||||
/// Vec (first value is oldest, last is newest)
|
||||
pub fn next_64(&mut self) -> Vec<T> {
|
||||
match &self.fhe_key {
|
||||
Some(sk) => {
|
||||
rayon::broadcast(|_| set_server_key(sk.clone()));
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
let values = self.get_64_output_and_values();
|
||||
match &self.fhe_key {
|
||||
Some(_) => {
|
||||
rayon::broadcast(|_| unset_server_key());
|
||||
}
|
||||
None => (),
|
||||
}
|
||||
|
||||
let mut bytes = Vec::<T>::with_capacity(8);
|
||||
for [o, a, b, c] in values {
|
||||
self.a_byte.push(a);
|
||||
self.b_byte.push(b);
|
||||
self.c_byte.push(c);
|
||||
bytes.push(o);
|
||||
}
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
/// Reconstructs a bunch of 5 bytes in a parallel fashion.
|
||||
fn get_bytes<const N: usize>(
|
||||
reg: &StaticByteDeque<N, T>,
|
||||
offsets: [usize; 5],
|
||||
) -> (T, T, T, T, T) {
|
||||
let mut ret = offsets
|
||||
.par_iter()
|
||||
.rev()
|
||||
.map(|&i| reg.byte(i))
|
||||
.collect::<Vec<_>>();
|
||||
(
|
||||
ret.pop().unwrap(),
|
||||
ret.pop().unwrap(),
|
||||
ret.pop().unwrap(),
|
||||
ret.pop().unwrap(),
|
||||
ret.pop().unwrap(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl TriviumStreamByte<FheUint8> {
|
||||
pub fn get_server_key(&self) -> &ServerKey {
|
||||
self.fhe_key.as_ref().unwrap()
|
||||
}
|
||||
}
|
||||
189
apps/trivium/src/trivium/trivium_shortint.rs
Normal file
189
apps/trivium/src/trivium/trivium_shortint.rs
Normal file
@@ -0,0 +1,189 @@
|
||||
use crate::static_deque::StaticDeque;
|
||||
|
||||
use tfhe::shortint::prelude::*;
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// TriviumStreamShortint: a struct implementing the Trivium stream cipher, using a generic
|
||||
/// Ciphertext for the internal representation of bits (intended to represent a single bit). To be
|
||||
/// able to compute FHE operations, it also owns a ServerKey.
|
||||
pub struct TriviumStreamShortint {
|
||||
a: StaticDeque<93, Ciphertext>,
|
||||
b: StaticDeque<84, Ciphertext>,
|
||||
c: StaticDeque<111, Ciphertext>,
|
||||
internal_server_key: ServerKey,
|
||||
transciphering_casting_key: KeySwitchingKey,
|
||||
hl_server_key: tfhe::ServerKey,
|
||||
}
|
||||
|
||||
impl TriviumStreamShortint {
|
||||
/// Constructor for TriviumStreamShortint: arguments are the secret key and the input vector,
|
||||
/// and a ServerKey reference. Outputs a TriviumStream object already initialized (1152
|
||||
/// steps have been run before returning)
|
||||
pub fn new(
|
||||
key: [Ciphertext; 80],
|
||||
iv: [u64; 80],
|
||||
sk: ServerKey,
|
||||
ksk: KeySwitchingKey,
|
||||
hl_sk: tfhe::ServerKey,
|
||||
) -> Self {
|
||||
// Initialization of Trivium registers: a has the secret key, b the input vector,
|
||||
// and c a few ones.
|
||||
let mut a_register: [Ciphertext; 93] = [0; 93].map(|x| sk.create_trivial(x));
|
||||
let mut b_register: [Ciphertext; 84] = [0; 84].map(|x| sk.create_trivial(x));
|
||||
let mut c_register: [Ciphertext; 111] = [0; 111].map(|x| sk.create_trivial(x));
|
||||
|
||||
for i in 0..80 {
|
||||
a_register[93 - 80 + i] = key[i].clone();
|
||||
b_register[84 - 80 + i] = sk.create_trivial(iv[i]);
|
||||
}
|
||||
|
||||
c_register[0] = sk.create_trivial(1);
|
||||
c_register[1] = sk.create_trivial(1);
|
||||
c_register[2] = sk.create_trivial(1);
|
||||
|
||||
let mut ret = Self {
|
||||
a: StaticDeque::<93, Ciphertext>::new(a_register),
|
||||
b: StaticDeque::<84, Ciphertext>::new(b_register),
|
||||
c: StaticDeque::<111, Ciphertext>::new(c_register),
|
||||
internal_server_key: sk,
|
||||
transciphering_casting_key: ksk,
|
||||
hl_server_key: hl_sk,
|
||||
};
|
||||
ret.init();
|
||||
ret
|
||||
}
|
||||
|
||||
/// The specification of Trivium includes running 1152 (= 18*64) unused steps to mix up the
|
||||
/// registers, before starting the proper stream
|
||||
fn init(&mut self) {
|
||||
for _ in 0..18 {
|
||||
self.next_64();
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes one turn of the stream, updating registers and outputting the new bit.
|
||||
pub fn next_ct(&mut self) -> Ciphertext {
|
||||
let [o, a, b, c] = self.get_output_and_values(0);
|
||||
|
||||
self.a.push(a);
|
||||
self.b.push(b);
|
||||
self.c.push(c);
|
||||
|
||||
o
|
||||
}
|
||||
|
||||
/// Computes a potential future step of Trivium, n terms in the future. This does not update
|
||||
/// registers, but rather returns with the output, the three values that will be used to
|
||||
/// update the registers, when the time is right. This function is meant to be used in
|
||||
/// parallel.
|
||||
fn get_output_and_values(&self, n: usize) -> [Ciphertext; 4] {
|
||||
let (a1, a2, a3, a4, a5) = (
|
||||
&self.a[65 - n],
|
||||
&self.a[92 - n],
|
||||
&self.a[91 - n],
|
||||
&self.a[90 - n],
|
||||
&self.a[68 - n],
|
||||
);
|
||||
let (b1, b2, b3, b4, b5) = (
|
||||
&self.b[68 - n],
|
||||
&self.b[83 - n],
|
||||
&self.b[82 - n],
|
||||
&self.b[81 - n],
|
||||
&self.b[77 - n],
|
||||
);
|
||||
let (c1, c2, c3, c4, c5) = (
|
||||
&self.c[65 - n],
|
||||
&self.c[110 - n],
|
||||
&self.c[109 - n],
|
||||
&self.c[108 - n],
|
||||
&self.c[86 - n],
|
||||
);
|
||||
|
||||
let temp_a = self.internal_server_key.unchecked_add(a1, a2);
|
||||
let temp_b = self.internal_server_key.unchecked_add(b1, b2);
|
||||
let temp_c = self.internal_server_key.unchecked_add(c1, c2);
|
||||
|
||||
let ((new_a, new_b), (new_c, o)) = rayon::join(
|
||||
|| {
|
||||
rayon::join(
|
||||
|| {
|
||||
let mut new_a = self.internal_server_key.unchecked_bitand(c3, c4);
|
||||
self.internal_server_key
|
||||
.unchecked_add_assign(&mut new_a, a5);
|
||||
self.internal_server_key
|
||||
.unchecked_add_assign(&mut new_a, &temp_c);
|
||||
self.internal_server_key.message_extract_assign(&mut new_a);
|
||||
new_a
|
||||
},
|
||||
|| {
|
||||
let mut new_b = self.internal_server_key.unchecked_bitand(a3, a4);
|
||||
self.internal_server_key
|
||||
.unchecked_add_assign(&mut new_b, b5);
|
||||
self.internal_server_key
|
||||
.unchecked_add_assign(&mut new_b, &temp_a);
|
||||
self.internal_server_key.message_extract_assign(&mut new_b);
|
||||
new_b
|
||||
},
|
||||
)
|
||||
},
|
||||
|| {
|
||||
rayon::join(
|
||||
|| {
|
||||
let mut new_c = self.internal_server_key.unchecked_bitand(b3, b4);
|
||||
self.internal_server_key
|
||||
.unchecked_add_assign(&mut new_c, c5);
|
||||
self.internal_server_key
|
||||
.unchecked_add_assign(&mut new_c, &temp_b);
|
||||
self.internal_server_key.message_extract_assign(&mut new_c);
|
||||
new_c
|
||||
},
|
||||
|| {
|
||||
self.internal_server_key.bitxor(
|
||||
&self.internal_server_key.unchecked_add(&temp_a, &temp_b),
|
||||
&temp_c,
|
||||
)
|
||||
},
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
[o, new_a, new_b, new_c]
|
||||
}
|
||||
|
||||
/// This calls `get_output_and_values` in parallel 64 times, and stores all results in a Vec.
|
||||
fn get_64_output_and_values(&self) -> Vec<[Ciphertext; 4]> {
|
||||
(0..64)
|
||||
.into_par_iter()
|
||||
.map(|x| self.get_output_and_values(x))
|
||||
.rev()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Computes 64 turns of the stream, outputting the 64 bits all at once in a
|
||||
/// Vec (first value is oldest, last is newest)
|
||||
pub fn next_64(&mut self) -> Vec<Ciphertext> {
|
||||
let mut values = self.get_64_output_and_values();
|
||||
|
||||
let mut ret = Vec::<Ciphertext>::with_capacity(64);
|
||||
while let Some([o, a, b, c]) = values.pop() {
|
||||
ret.push(o);
|
||||
self.a.push(a);
|
||||
self.b.push(b);
|
||||
self.c.push(c);
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn get_internal_server_key(&self) -> &ServerKey {
|
||||
&self.internal_server_key
|
||||
}
|
||||
|
||||
pub fn get_casting_key(&self) -> &KeySwitchingKey {
|
||||
&self.transciphering_casting_key
|
||||
}
|
||||
|
||||
pub fn get_hl_server_key(&self) -> &tfhe::ServerKey {
|
||||
&self.hl_server_key
|
||||
}
|
||||
}
|
||||
@@ -39,8 +39,10 @@ parser.add_argument('--walk-subdirs', dest='walk_subdirs', action='store_true',
|
||||
help='Check for results in subdirectories')
|
||||
parser.add_argument('--key-sizes', dest='key_sizes', action='store_true',
|
||||
help='Parse only the results regarding keys size measurements')
|
||||
parser.add_argument('--key-gen', dest='key_gen', action='store_true',
|
||||
help='Parse only the results regarding keys generation time measurements')
|
||||
parser.add_argument('--throughput', dest='throughput', action='store_true',
|
||||
help='Compute and append number of operations per millisecond and'
|
||||
help='Compute and append number of operations per second and'
|
||||
'operations per dollar')
|
||||
parser.add_argument('--backend', dest='backend', default='cpu',
|
||||
help='Backend on which benchmarks have run')
|
||||
@@ -55,7 +57,7 @@ def recursive_parse(directory, walk_subdirs=False, name_suffix="", compute_throu
|
||||
:param directory: path to directory that contains raw results as :class:`pathlib.Path`
|
||||
:param walk_subdirs: traverse results subdirectories if parameters changes for benchmark case.
|
||||
:param name_suffix: a :class:`str` suffix to apply to each test name found
|
||||
:param compute_throughput: compute number of operations per millisecond and operations per
|
||||
:param compute_throughput: compute number of operations per second and operations per
|
||||
dollar
|
||||
:param hardware_hourly_cost: hourly cost of the hardware used in dollar
|
||||
|
||||
@@ -93,6 +95,7 @@ def recursive_parse(directory, walk_subdirs=False, name_suffix="", compute_throu
|
||||
|
||||
for stat_name, value in parse_estimate_file(subdir).items():
|
||||
test_name_parts = list(filter(None, [test_name, stat_name, name_suffix]))
|
||||
|
||||
result_values.append(
|
||||
_create_point(
|
||||
value,
|
||||
@@ -105,12 +108,25 @@ def recursive_parse(directory, walk_subdirs=False, name_suffix="", compute_throu
|
||||
)
|
||||
)
|
||||
|
||||
# This is a special case where PBS are blasted as vector LWE ciphertext with
|
||||
# variable length to saturate the machine. To get the actual throughput we need to
|
||||
# multiply by the length of the vector.
|
||||
if "PBS_throughput" in test_name and "chunk" in test_name:
|
||||
try:
|
||||
multiplier = int(test_name.split("chunk")[0].split("_")[-1])
|
||||
except ValueError:
|
||||
parsing_failures.append((full_name,
|
||||
"failed to extract throughput multiplier"))
|
||||
continue
|
||||
else:
|
||||
multiplier = 1
|
||||
|
||||
if stat_name == "mean" and compute_throughput:
|
||||
test_suffix = "ops-per-ms"
|
||||
test_suffix = "ops-per-sec"
|
||||
test_name_parts.append(test_suffix)
|
||||
result_values.append(
|
||||
_create_point(
|
||||
compute_ops_per_millisecond(value),
|
||||
multiplier * compute_ops_per_second(value),
|
||||
"_".join(test_name_parts),
|
||||
bench_class,
|
||||
"throughput",
|
||||
@@ -126,7 +142,7 @@ def recursive_parse(directory, walk_subdirs=False, name_suffix="", compute_throu
|
||||
test_name_parts.append(test_suffix)
|
||||
result_values.append(
|
||||
_create_point(
|
||||
compute_ops_per_dollar(value, hardware_hourly_cost),
|
||||
multiplier * compute_ops_per_dollar(value, hardware_hourly_cost),
|
||||
"_".join(test_name_parts),
|
||||
bench_class,
|
||||
"throughput",
|
||||
@@ -177,9 +193,9 @@ def parse_estimate_file(directory):
|
||||
}
|
||||
|
||||
|
||||
def parse_key_sizes(result_file):
|
||||
def _parse_key_results(result_file, bench_type):
|
||||
"""
|
||||
Parse file containing key sizes results. The file must be formatted as CSV.
|
||||
Parse file containing results about operation on keys. The file must be formatted as CSV.
|
||||
|
||||
:param result_file: results file as :class:`pathlib.Path`
|
||||
|
||||
@@ -202,13 +218,35 @@ def parse_key_sizes(result_file):
|
||||
"test": test_name,
|
||||
"name": display_name,
|
||||
"class": "keygen",
|
||||
"type": "keysize",
|
||||
"type": bench_type,
|
||||
"operator": operator,
|
||||
"params": params})
|
||||
|
||||
return result_values, parsing_failures
|
||||
|
||||
|
||||
def parse_key_sizes(result_file):
|
||||
"""
|
||||
Parse file containing key sizes results. The file must be formatted as CSV.
|
||||
|
||||
:param result_file: results file as :class:`pathlib.Path`
|
||||
|
||||
:return: tuple of :class:`list` as (data points, parsing failures)
|
||||
"""
|
||||
return _parse_key_results(result_file, "keysize")
|
||||
|
||||
|
||||
def parse_key_gen_time(result_file):
|
||||
"""
|
||||
Parse file containing key generation time results. The file must be formatted as CSV.
|
||||
|
||||
:param result_file: results file as :class:`pathlib.Path`
|
||||
|
||||
:return: tuple of :class:`list` as (data points, parsing failures)
|
||||
"""
|
||||
return _parse_key_results(result_file, "latency")
|
||||
|
||||
|
||||
def get_parameters(bench_id):
|
||||
"""
|
||||
Get benchmarks parameters recorded for a given benchmark case.
|
||||
@@ -242,15 +280,15 @@ def compute_ops_per_dollar(data_point, product_hourly_cost):
|
||||
return ONE_HOUR_IN_NANOSECONDS / (product_hourly_cost * data_point)
|
||||
|
||||
|
||||
def compute_ops_per_millisecond(data_point):
|
||||
def compute_ops_per_second(data_point):
|
||||
"""
|
||||
Compute numbers of operations per millisecond for a given ``data_point``.
|
||||
Compute numbers of operations per second for a given ``data_point``.
|
||||
|
||||
:param data_point: timing value measured during benchmark in nanoseconds
|
||||
|
||||
:return: number of operations per millisecond
|
||||
:return: number of operations per second
|
||||
"""
|
||||
return 1E6 / data_point
|
||||
return 1E9 / data_point
|
||||
|
||||
|
||||
def _parse_file_to_json(directory, filename):
|
||||
@@ -301,7 +339,7 @@ def check_mandatory_args(input_args):
|
||||
for arg_name in vars(input_args):
|
||||
if arg_name in ["results_dir", "output_file", "name_suffix",
|
||||
"append_results", "walk_subdirs", "key_sizes",
|
||||
"throughput"]:
|
||||
"key_gen", "throughput"]:
|
||||
continue
|
||||
if not getattr(input_args, arg_name):
|
||||
missing_args.append(arg_name)
|
||||
@@ -318,7 +356,15 @@ if __name__ == "__main__":
|
||||
|
||||
#failures = []
|
||||
raw_results = pathlib.Path(args.results)
|
||||
if not args.key_sizes:
|
||||
if args.key_sizes or args.key_gen:
|
||||
if args.key_sizes:
|
||||
print("Parsing key sizes results... ")
|
||||
results, failures = parse_key_sizes(raw_results)
|
||||
|
||||
if args.key_gen:
|
||||
print("Parsing key generation time results... ")
|
||||
results, failures = parse_key_gen_time(raw_results)
|
||||
else:
|
||||
print("Parsing benchmark results... ")
|
||||
hardware_cost = None
|
||||
if args.throughput:
|
||||
@@ -334,9 +380,7 @@ if __name__ == "__main__":
|
||||
|
||||
results, failures = recursive_parse(raw_results, args.walk_subdirs, args.name_suffix,
|
||||
args.throughput, hardware_cost)
|
||||
else:
|
||||
print("Parsing key sizes results... ")
|
||||
results, failures = parse_key_sizes(raw_results)
|
||||
|
||||
print("Parsing results done")
|
||||
|
||||
output_file = pathlib.Path(args.output_file)
|
||||
|
||||
76
ci/lattice_estimator.sage
Executable file
76
ci/lattice_estimator.sage
Executable file
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
lattice_estimator
|
||||
-----------------
|
||||
|
||||
Test cryptographic parameters set against several attacks to estimate their security level.
|
||||
"""
|
||||
import pathlib
|
||||
import sys
|
||||
sys.path.insert(1, 'lattice-estimator')
|
||||
from estimator import *
|
||||
|
||||
|
||||
model = RC.BDGL16
|
||||
|
||||
def check_security(filename):
|
||||
"""
|
||||
Run lattice estimator to determine if a parameters set is secure or not.
|
||||
|
||||
:param filename: name of the file containing parameters set
|
||||
|
||||
:return: :class:`list` of parameters to update
|
||||
"""
|
||||
filepath = pathlib.Path("ci", filename)
|
||||
load(filepath)
|
||||
print(f"Parsing parameters in {filepath}")
|
||||
|
||||
to_update = []
|
||||
|
||||
for param in all_params:
|
||||
if param.tag.startswith("TFHE_LIB_PARAMETERS"):
|
||||
# This third-party parameters set is known to be less secure, just skip the analysis.
|
||||
continue
|
||||
|
||||
print(f"\t{param.tag}...\t", end= "")
|
||||
|
||||
try:
|
||||
# The lattice estimator is not able to manage such large dimension.
|
||||
# If we have the security for smaller `n` then we have security for larger ones.
|
||||
if param.n > 16384:
|
||||
param = param.updated(n = 16384)
|
||||
|
||||
usvp_level = LWE.primal_usvp(param, red_cost_model = model)
|
||||
dual_level = LWE.dual_hybrid(param, red_cost_model = model)
|
||||
|
||||
estimator_level = log(min(usvp_level["rop"], dual_level["rop"]),2 )
|
||||
security_level = f"security level = {estimator_level} bits"
|
||||
if estimator_level < 127:
|
||||
print("FAIL\t({security_level})")
|
||||
reason = f"attained {security_level} target is 128 bits"
|
||||
to_update.append((param, reason))
|
||||
continue
|
||||
except Exception as err:
|
||||
print("FAIL")
|
||||
to_update.append((param, f"{repr(err)}"))
|
||||
else:
|
||||
print(f"OK\t({security_level})")
|
||||
|
||||
return to_update
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
params_to_update = []
|
||||
|
||||
for params_filename in ("boolean_parameters_lattice_estimator.sage",
|
||||
"shortint_classic_parameters_lattice_estimator.sage",
|
||||
"shortint_multi_bit_parameters_lattice_estimator.sage"):
|
||||
params_to_update.extend(check_security(params_filename))
|
||||
|
||||
if params_to_update:
|
||||
print("Some parameters need update")
|
||||
print("----------------------------")
|
||||
for param, reason in params_to_update:
|
||||
print(f"[{param.tag}] reason: {reason} (param)")
|
||||
sys.exit(int(1)) # Explicit conversion is needed to make this call work
|
||||
else:
|
||||
print("All parameters passed the security check")
|
||||
99
ci/parse_integer_benches_to_csv.py
Normal file
99
ci/parse_integer_benches_to_csv.py
Normal file
@@ -0,0 +1,99 @@
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
import json
|
||||
|
||||
|
||||
def main(args):
|
||||
criterion_dir = Path(args.criterion_dir)
|
||||
output_file = Path(args.output_file)
|
||||
|
||||
data = []
|
||||
for json_file in sorted(criterion_dir.glob("**/*.json")):
|
||||
if json_file.parent.name == "base" or json_file.name != "benchmark.json":
|
||||
continue
|
||||
|
||||
try:
|
||||
bench_data = json.loads(json_file.read_text())
|
||||
estimate_file = json_file.with_name("estimates.json")
|
||||
estimate_data = json.loads(estimate_file.read_text())
|
||||
|
||||
bench_function_id = bench_data["function_id"]
|
||||
|
||||
split = bench_function_id.split("::")
|
||||
(_, function_name, parameter_set, bits) = split
|
||||
|
||||
if "_scalar_" in bits:
|
||||
(bits, scalar) = bits.split("_bits_scalar_")
|
||||
bits = int(bits)
|
||||
scalar = int(scalar)
|
||||
else:
|
||||
(bits, _) = bits.split("_")
|
||||
bits = int(bits)
|
||||
scalar = None
|
||||
|
||||
estimate_mean_ms = estimate_data["mean"]["point_estimate"] / 1000000
|
||||
estimate_lower_bound_ms = (
|
||||
estimate_data["mean"]["confidence_interval"]["lower_bound"] / 1000000
|
||||
)
|
||||
estimate_upper_bound_ms = (
|
||||
estimate_data["mean"]["confidence_interval"]["upper_bound"] / 1000000
|
||||
)
|
||||
|
||||
data.append(
|
||||
(
|
||||
function_name,
|
||||
parameter_set,
|
||||
bits,
|
||||
scalar,
|
||||
estimate_mean_ms,
|
||||
estimate_lower_bound_ms,
|
||||
estimate_upper_bound_ms,
|
||||
)
|
||||
)
|
||||
except:
|
||||
pass
|
||||
|
||||
if len(data) == 0:
|
||||
print("No integer bench found, skipping writing output file")
|
||||
return
|
||||
|
||||
with open(output_file, "w", encoding="utf-8") as output:
|
||||
output.write(
|
||||
"function_name,parameter_set,bits,scalar,mean_ms,"
|
||||
"confidence_interval_lower_bound_ms,confidence_interval_upper_bound_ms\n"
|
||||
)
|
||||
# Sort by func_name, bit width and then parameters
|
||||
data.sort(key=lambda x: (x[0], x[2], x[1]))
|
||||
|
||||
for dat in data:
|
||||
(
|
||||
function_name,
|
||||
parameter_set,
|
||||
bits,
|
||||
scalar,
|
||||
estimate_mean_ms,
|
||||
estimate_lower_bound_ms,
|
||||
estimate_upper_bound_ms,
|
||||
) = dat
|
||||
output.write(
|
||||
f"{function_name},{parameter_set},{bits},{scalar},{estimate_mean_ms},"
|
||||
f"{estimate_lower_bound_ms},{estimate_upper_bound_ms}\n"
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser("Parse criterion results to csv file")
|
||||
parser.add_argument(
|
||||
"--criterion-dir",
|
||||
type=str,
|
||||
default="target/criterion",
|
||||
help="Where to look for criterion result json files",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--output-file",
|
||||
type=str,
|
||||
default="parsed_benches.csv",
|
||||
help="Path of the output file, will be csv formatted",
|
||||
)
|
||||
|
||||
main(parser.parse_args())
|
||||
54
ci/slab.toml
54
ci/slab.toml
@@ -1,11 +1,16 @@
|
||||
[profile.cpu-big]
|
||||
region = "eu-west-3"
|
||||
image_id = "ami-04deffe45b5b236fd"
|
||||
image_id = "ami-051942e4055555752"
|
||||
instance_type = "m6i.32xlarge"
|
||||
|
||||
[profile.cpu-small]
|
||||
region = "eu-west-3"
|
||||
image_id = "ami-051942e4055555752"
|
||||
instance_type = "m6i.4xlarge"
|
||||
|
||||
[profile.bench]
|
||||
region = "eu-west-3"
|
||||
image_id = "ami-04deffe45b5b236fd"
|
||||
image_id = "ami-051942e4055555752"
|
||||
instance_type = "m6i.metal"
|
||||
|
||||
[command.cpu_test]
|
||||
@@ -18,11 +23,41 @@ workflow = "aws_tfhe_integer_tests.yml"
|
||||
profile = "cpu-big"
|
||||
check_run_name = "CPU Integer AWS Tests"
|
||||
|
||||
[command.cpu_multi_bit_test]
|
||||
workflow = "aws_tfhe_multi_bit_tests.yml"
|
||||
profile = "cpu-big"
|
||||
check_run_name = "CPU AWS Multi Bit Tests"
|
||||
|
||||
[command.cpu_wasm_test]
|
||||
workflow = "aws_tfhe_wasm_tests.yml"
|
||||
profile = "cpu-small"
|
||||
check_run_name = "CPU AWS WASM Tests"
|
||||
|
||||
[command.cpu_fast_test]
|
||||
workflow = "aws_tfhe_fast_tests.yml"
|
||||
profile = "cpu-big"
|
||||
check_run_name = "CPU AWS Fast Tests"
|
||||
|
||||
[command.integer_full_bench]
|
||||
workflow = "integer_full_benchmark.yml"
|
||||
profile = "bench"
|
||||
check_run_name = "Integer CPU AWS Benchmarks Full Suite"
|
||||
|
||||
[command.integer_bench]
|
||||
workflow = "integer_benchmark.yml"
|
||||
profile = "bench"
|
||||
check_run_name = "Integer CPU AWS Benchmarks"
|
||||
|
||||
[command.integer_multi_bit_bench]
|
||||
workflow = "integer_multi_bit_benchmark.yml"
|
||||
profile = "bench"
|
||||
check_run_name = "Integer multi bit CPU AWS Benchmarks"
|
||||
|
||||
[command.shortint_full_bench]
|
||||
workflow = "shortint_full_benchmark.yml"
|
||||
profile = "bench"
|
||||
check_run_name = "Shortint CPU AWS Benchmarks Full Suite"
|
||||
|
||||
[command.shortint_bench]
|
||||
workflow = "shortint_benchmark.yml"
|
||||
profile = "bench"
|
||||
@@ -37,3 +72,18 @@ check_run_name = "Boolean CPU AWS Benchmarks"
|
||||
workflow = "pbs_benchmark.yml"
|
||||
profile = "bench"
|
||||
check_run_name = "PBS CPU AWS Benchmarks"
|
||||
|
||||
[command.wasm_client_bench]
|
||||
workflow = "wasm_client_benchmark.yml"
|
||||
profile = "cpu-small"
|
||||
check_run_name = "WASM Client AWS Benchmarks"
|
||||
|
||||
[command.csprng_randomness_testing]
|
||||
workflow = "csprng_randomness_testing.yml"
|
||||
profile = "cpu-small"
|
||||
check_run_name = "CSPRNG randomness testing"
|
||||
|
||||
[command.code_coverage]
|
||||
workflow = "code_coverage.yml"
|
||||
profile = "cpu-small"
|
||||
check_run_name = "Code coverage"
|
||||
|
||||
4
codecov.yml
Normal file
4
codecov.yml
Normal file
@@ -0,0 +1,4 @@
|
||||
coverage:
|
||||
status:
|
||||
# Disable patch checks in GitHub until all tfhe-rs layers have coverage implemented.
|
||||
patch: false
|
||||
53
concrete-csprng/Cargo.toml
Normal file
53
concrete-csprng/Cargo.toml
Normal file
@@ -0,0 +1,53 @@
|
||||
[package]
|
||||
name = "concrete-csprng"
|
||||
version = "0.4.0"
|
||||
edition = "2021"
|
||||
license = "BSD-3-Clause-Clear"
|
||||
description = "Cryptographically Secure PRNG used in the TFHE-rs library."
|
||||
homepage = "https://zama.ai/"
|
||||
documentation = "https://docs.zama.ai/tfhe-rs"
|
||||
repository = "https://github.com/zama-ai/tfhe-rs"
|
||||
readme = "README.md"
|
||||
keywords = ["fully", "homomorphic", "encryption", "fhe", "cryptography"]
|
||||
rust-version = "1.72"
|
||||
|
||||
[dependencies]
|
||||
aes = "0.8.2"
|
||||
rayon = { version = "1.5.0", optional = true }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
libc = "0.2.133"
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.8.3"
|
||||
criterion = "0.5.1"
|
||||
clap = "=4.4.4"
|
||||
|
||||
[features]
|
||||
parallel = ["rayon"]
|
||||
seeder_x86_64_rdseed = []
|
||||
seeder_unix = []
|
||||
generator_x86_64_aesni = []
|
||||
generator_fallback = []
|
||||
generator_aarch64_aes = []
|
||||
|
||||
x86_64 = [
|
||||
"parallel",
|
||||
"seeder_x86_64_rdseed",
|
||||
"generator_x86_64_aesni",
|
||||
"generator_fallback",
|
||||
]
|
||||
x86_64-unix = ["x86_64", "seeder_unix"]
|
||||
aarch64 = ["parallel", "generator_aarch64_aes", "generator_fallback"]
|
||||
aarch64-unix = ["aarch64", "seeder_unix"]
|
||||
|
||||
[[bench]]
|
||||
name = "benchmark"
|
||||
path = "benches/benchmark.rs"
|
||||
harness = false
|
||||
required-features = ["seeder_x86_64_rdseed", "generator_x86_64_aesni"]
|
||||
|
||||
[[example]]
|
||||
name = "generate"
|
||||
path = "examples/generate.rs"
|
||||
required-features = ["seeder_unix", "generator_fallback"]
|
||||
28
concrete-csprng/LICENSE
Normal file
28
concrete-csprng/LICENSE
Normal file
@@ -0,0 +1,28 @@
|
||||
BSD 3-Clause Clear License
|
||||
|
||||
Copyright © 2023 ZAMA.
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or other
|
||||
materials provided with the distribution.
|
||||
|
||||
3. Neither the name of ZAMA nor the names of its contributors may be used to endorse
|
||||
or promote products derived from this software without specific prior written permission.
|
||||
|
||||
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE.
|
||||
THIS SOFTWARE IS PROVIDED BY THE ZAMA AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
|
||||
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
ZAMA OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
|
||||
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
|
||||
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
23
concrete-csprng/README.md
Normal file
23
concrete-csprng/README.md
Normal file
@@ -0,0 +1,23 @@
|
||||
# Concrete CSPRNG
|
||||
|
||||
This crate contains a fast *Cryptographically Secure Pseudoramdon Number Generator*, used in the
|
||||
['concrete-core'](https://crates.io/crates/concrete-core) library, you can find it [here](../concrete-core/) in this repo.
|
||||
|
||||
The implementation is based on the AES blockcipher used in CTR mode, as described in the ISO/IEC
|
||||
18033-4 standard.
|
||||
|
||||
Two implementations are available, an accelerated one on x86_64 CPUs with the `aes` feature and the `sse2` feature, and a pure software one that can be used on other platforms.
|
||||
|
||||
The crate also makes two seeders available, one needing the x86_64 feature `rdseed` and another one based on the Unix random device `/dev/random` the latter requires the user to provide a secret.
|
||||
|
||||
## Running the benchmarks
|
||||
|
||||
To execute the benchmarks on an x86_64 platform:
|
||||
```shell
|
||||
RUSTFLAGS="-Ctarget-cpu=native" cargo bench --features=seeder_x86_64_rdseed,generator_x86_64_aesni
|
||||
```
|
||||
|
||||
## License
|
||||
|
||||
This software is distributed under the BSD-3-Clause-Clear license. If you have any questions,
|
||||
please contact us at `hello@zama.ai`.
|
||||
54
concrete-csprng/benches/benchmark.rs
Normal file
54
concrete-csprng/benches/benchmark.rs
Normal file
@@ -0,0 +1,54 @@
|
||||
use concrete_csprng::generators::{
|
||||
AesniRandomGenerator, BytesPerChild, ChildrenCount, RandomGenerator,
|
||||
};
|
||||
use concrete_csprng::seeders::{RdseedSeeder, Seeder};
|
||||
use criterion::{black_box, criterion_group, criterion_main, Criterion};
|
||||
|
||||
// The number of bytes to generate during one benchmark iteration.
|
||||
const N_GEN: usize = 1_000_000;
|
||||
|
||||
fn parent_generate(c: &mut Criterion) {
|
||||
let mut seeder = RdseedSeeder;
|
||||
let mut generator = AesniRandomGenerator::new(seeder.seed());
|
||||
c.bench_function("parent_generate", |b| {
|
||||
b.iter(|| {
|
||||
(0..N_GEN).for_each(|_| {
|
||||
generator.next();
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
fn child_generate(c: &mut Criterion) {
|
||||
let mut seeder = RdseedSeeder;
|
||||
let mut generator = AesniRandomGenerator::new(seeder.seed());
|
||||
let mut generator = generator
|
||||
.try_fork(ChildrenCount(1), BytesPerChild(N_GEN * 10_000))
|
||||
.unwrap()
|
||||
.next()
|
||||
.unwrap();
|
||||
c.bench_function("child_generate", |b| {
|
||||
b.iter(|| {
|
||||
(0..N_GEN).for_each(|_| {
|
||||
generator.next();
|
||||
})
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
fn fork(c: &mut Criterion) {
|
||||
let mut seeder = RdseedSeeder;
|
||||
let mut generator = AesniRandomGenerator::new(seeder.seed());
|
||||
c.bench_function("fork", |b| {
|
||||
b.iter(|| {
|
||||
black_box(
|
||||
generator
|
||||
.try_fork(ChildrenCount(2048), BytesPerChild(2048))
|
||||
.unwrap(),
|
||||
)
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(benches, parent_generate, child_generate, fork);
|
||||
criterion_main!(benches);
|
||||
112
concrete-csprng/build.rs
Normal file
112
concrete-csprng/build.rs
Normal file
@@ -0,0 +1,112 @@
|
||||
// To have clear error messages during compilation about why some piece of code may not be available
|
||||
// we decided to check the features compatibility with the target configuration in this script.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::env;
|
||||
|
||||
// See https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch for various
|
||||
// compilation configuration
|
||||
|
||||
// Can be easily extended if needed
|
||||
pub struct FeatureRequirement {
|
||||
pub feature_name: &'static str,
|
||||
// target_arch requirement
|
||||
pub feature_req_target_arch: Option<&'static str>,
|
||||
// target_family requirement
|
||||
pub feature_req_target_family: Option<&'static str>,
|
||||
}
|
||||
|
||||
// We implement a version of default that is const which is not possible through the Default trait
|
||||
impl FeatureRequirement {
|
||||
// As we cannot use cfg!(feature = "feature_name") with something else than a literal, we need
|
||||
// a reference to the HashMap we populate with the enabled features
|
||||
fn is_activated(&self, build_activated_features: &HashMap<&'static str, bool>) -> bool {
|
||||
*build_activated_features.get(self.feature_name).unwrap()
|
||||
}
|
||||
|
||||
// panics if the requirements are not met
|
||||
fn check_requirements(&self) {
|
||||
let target_arch = get_target_arch_cfg();
|
||||
if let Some(feature_req_target_arch) = self.feature_req_target_arch {
|
||||
if feature_req_target_arch != target_arch {
|
||||
panic!(
|
||||
"Feature `{}` requires target_arch `{}`, current cfg: `{}`",
|
||||
self.feature_name, feature_req_target_arch, target_arch
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
let target_family = get_target_family_cfg();
|
||||
if let Some(feature_req_target_family) = self.feature_req_target_family {
|
||||
if feature_req_target_family != target_family {
|
||||
panic!(
|
||||
"Feature `{}` requires target_family `{}`, current cfg: `{}`",
|
||||
self.feature_name, feature_req_target_family, target_family
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// const vecs are not yet a thing so use a fixed size array (update the array size when adding
|
||||
// requirements)
|
||||
static FEATURE_REQUIREMENTS: [FeatureRequirement; 4] = [
|
||||
FeatureRequirement {
|
||||
feature_name: "seeder_x86_64_rdseed",
|
||||
feature_req_target_arch: Some("x86_64"),
|
||||
feature_req_target_family: None,
|
||||
},
|
||||
FeatureRequirement {
|
||||
feature_name: "generator_x86_64_aesni",
|
||||
feature_req_target_arch: Some("x86_64"),
|
||||
feature_req_target_family: None,
|
||||
},
|
||||
FeatureRequirement {
|
||||
feature_name: "seeder_unix",
|
||||
feature_req_target_arch: None,
|
||||
feature_req_target_family: Some("unix"),
|
||||
},
|
||||
FeatureRequirement {
|
||||
feature_name: "generator_aarch64_aes",
|
||||
feature_req_target_arch: Some("aarch64"),
|
||||
feature_req_target_family: None,
|
||||
},
|
||||
];
|
||||
|
||||
// For a "feature_name" feature_cfg!("feature_name") expands to
|
||||
// ("feature_name", cfg!(feature = "feature_name"))
|
||||
macro_rules! feature_cfg {
|
||||
($feat_name:literal) => {
|
||||
($feat_name, cfg!(feature = $feat_name))
|
||||
};
|
||||
}
|
||||
|
||||
// Static HashMap would require an additional crate (phf or lazy static e.g.), so we just write a
|
||||
// function that returns the HashMap we are interested in
|
||||
fn get_feature_enabled_status() -> HashMap<&'static str, bool> {
|
||||
HashMap::from([
|
||||
feature_cfg!("seeder_x86_64_rdseed"),
|
||||
feature_cfg!("generator_x86_64_aesni"),
|
||||
feature_cfg!("seeder_unix"),
|
||||
feature_cfg!("generator_aarch64_aes"),
|
||||
])
|
||||
}
|
||||
|
||||
// See https://stackoverflow.com/a/43435335/18088947 for the inspiration of this code
|
||||
fn get_target_arch_cfg() -> String {
|
||||
env::var("CARGO_CFG_TARGET_ARCH").expect("CARGO_CFG_TARGET_ARCH is not set")
|
||||
}
|
||||
|
||||
fn get_target_family_cfg() -> String {
|
||||
env::var("CARGO_CFG_TARGET_FAMILY").expect("CARGO_CFG_TARGET_FAMILY is not set")
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let feature_enabled_status = get_feature_enabled_status();
|
||||
|
||||
// This will panic if some requirements for a feature are not met
|
||||
FEATURE_REQUIREMENTS
|
||||
.iter()
|
||||
.filter(|&req| FeatureRequirement::is_activated(req, &feature_enabled_status))
|
||||
.for_each(FeatureRequirement::check_requirements);
|
||||
}
|
||||
113
concrete-csprng/examples/generate.rs
Normal file
113
concrete-csprng/examples/generate.rs
Normal file
@@ -0,0 +1,113 @@
|
||||
//! This program uses the concrete csprng to generate an infinite stream of random bytes on
|
||||
//! the program stdout. It can also generate a fixed number of bytes by passing a value along the
|
||||
//! optional argument `--bytes_total`. For testing purpose.
|
||||
use clap::{value_parser, Arg, Command};
|
||||
#[cfg(feature = "generator_x86_64_aesni")]
|
||||
use concrete_csprng::generators::AesniRandomGenerator as ActivatedRandomGenerator;
|
||||
#[cfg(feature = "generator_aarch64_aes")]
|
||||
use concrete_csprng::generators::NeonAesRandomGenerator as ActivatedRandomGenerator;
|
||||
#[cfg(all(
|
||||
not(feature = "generator_x86_64_aesni"),
|
||||
not(feature = "generator_aarch64_aes"),
|
||||
feature = "generator_fallback"
|
||||
))]
|
||||
use concrete_csprng::generators::SoftwareRandomGenerator as ActivatedRandomGenerator;
|
||||
|
||||
use concrete_csprng::generators::RandomGenerator;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
use concrete_csprng::seeders::AppleSecureEnclaveSeeder as ActivatedSeeder;
|
||||
#[cfg(all(not(target_os = "macos"), feature = "seeder_x86_64_rdseed"))]
|
||||
use concrete_csprng::seeders::RdseedSeeder as ActivatedSeeder;
|
||||
#[cfg(all(
|
||||
not(target_os = "macos"),
|
||||
not(feature = "seeder_x86_64_rdseed"),
|
||||
feature = "seeder_unix"
|
||||
))]
|
||||
use concrete_csprng::seeders::UnixSeeder as ActivatedSeeder;
|
||||
|
||||
use concrete_csprng::seeders::Seeder;
|
||||
|
||||
use std::io::prelude::*;
|
||||
use std::io::{stdout, StdoutLock};
|
||||
|
||||
fn write_bytes(
|
||||
buffer: &mut [u8],
|
||||
generator: &mut ActivatedRandomGenerator,
|
||||
stdout: &mut StdoutLock<'_>,
|
||||
) -> std::io::Result<()> {
|
||||
buffer.iter_mut().zip(generator).for_each(|(b, g)| *b = g);
|
||||
stdout.write_all(buffer)
|
||||
}
|
||||
|
||||
fn infinite_bytes_generation(
|
||||
buffer: &mut [u8],
|
||||
generator: &mut ActivatedRandomGenerator,
|
||||
stdout: &mut StdoutLock<'_>,
|
||||
) {
|
||||
while write_bytes(buffer, generator, stdout).is_ok() {}
|
||||
}
|
||||
|
||||
fn bytes_generation(
|
||||
bytes_total: usize,
|
||||
buffer: &mut [u8],
|
||||
generator: &mut ActivatedRandomGenerator,
|
||||
stdout: &mut StdoutLock<'_>,
|
||||
) {
|
||||
let quotient = bytes_total / buffer.len();
|
||||
let remaining = bytes_total % buffer.len();
|
||||
|
||||
for _ in 0..quotient {
|
||||
write_bytes(buffer, generator, stdout).unwrap();
|
||||
}
|
||||
|
||||
write_bytes(&mut buffer[0..remaining], generator, stdout).unwrap()
|
||||
}
|
||||
|
||||
pub fn main() {
|
||||
let matches = Command::new(
|
||||
"Generate a stream of random numbers, specify no flags for infinite generation",
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bytes_total")
|
||||
.short('b')
|
||||
.long("bytes_total")
|
||||
.value_parser(value_parser!(usize))
|
||||
.help("Total number of bytes that has to be generated"),
|
||||
)
|
||||
.get_matches();
|
||||
|
||||
// Ugly hack to be able to use UnixSeeder
|
||||
#[cfg(all(
|
||||
not(target_os = "macos"),
|
||||
not(feature = "seeder_x86_64_rdseed"),
|
||||
feature = "seeder_unix"
|
||||
))]
|
||||
let new_seeder = || ActivatedSeeder::new(0);
|
||||
#[cfg(not(all(
|
||||
not(target_os = "macos"),
|
||||
not(feature = "seeder_x86_64_rdseed"),
|
||||
feature = "seeder_unix"
|
||||
)))]
|
||||
let new_seeder = || ActivatedSeeder;
|
||||
|
||||
let mut seeder = new_seeder();
|
||||
let seed = seeder.seed();
|
||||
// Don't print on std out
|
||||
eprintln!("seed={seed:?}");
|
||||
let mut generator = ActivatedRandomGenerator::new(seed);
|
||||
let stdout = stdout();
|
||||
let mut buffer = [0u8; 16];
|
||||
|
||||
// lock stdout as there is a single thread running
|
||||
let mut stdout = stdout.lock();
|
||||
|
||||
match matches.get_one::<usize>("bytes_total") {
|
||||
Some(&total) => {
|
||||
bytes_generation(total, &mut buffer, &mut generator, &mut stdout);
|
||||
}
|
||||
None => {
|
||||
infinite_bytes_generation(&mut buffer, &mut generator, &mut stdout);
|
||||
}
|
||||
};
|
||||
}
|
||||
20
concrete-csprng/src/generators/aes_ctr/block_cipher.rs
Normal file
20
concrete-csprng/src/generators/aes_ctr/block_cipher.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use crate::generators::aes_ctr::index::AesIndex;
|
||||
use crate::generators::aes_ctr::BYTES_PER_BATCH;
|
||||
|
||||
/// Represents a key used in the AES block cipher.
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct AesKey(pub u128);
|
||||
|
||||
/// A trait for AES block ciphers.
|
||||
///
|
||||
/// Note:
|
||||
/// -----
|
||||
///
|
||||
/// The block cipher is used in a batched manner (to reduce amortized cost on special hardware).
|
||||
/// For this reason we only expose a `generate_batch` method.
|
||||
pub trait AesBlockCipher: Clone + Send + Sync {
|
||||
/// Instantiate a new generator from a secret key.
|
||||
fn new(key: AesKey) -> Self;
|
||||
/// Generates the batch corresponding to the given index.
|
||||
fn generate_batch(&mut self, index: AesIndex) -> [u8; BYTES_PER_BATCH];
|
||||
}
|
||||
379
concrete-csprng/src/generators/aes_ctr/generic.rs
Normal file
379
concrete-csprng/src/generators/aes_ctr/generic.rs
Normal file
@@ -0,0 +1,379 @@
|
||||
use crate::generators::aes_ctr::block_cipher::{AesBlockCipher, AesKey};
|
||||
use crate::generators::aes_ctr::index::TableIndex;
|
||||
use crate::generators::aes_ctr::states::{BufferPointer, ShiftAction, State};
|
||||
use crate::generators::aes_ctr::BYTES_PER_BATCH;
|
||||
use crate::generators::{ByteCount, BytesPerChild, ChildrenCount, ForkError};
|
||||
|
||||
// Usually, to work with iterators and parallel iterators, we would use opaque types such as
|
||||
// `impl Iterator<..>`. Unfortunately, it is not yet possible to return existential types in
|
||||
// traits, which we would need for `RandomGenerator`. For this reason, we have to use the
|
||||
// full type name where needed. Hence the following trait aliases definition:
|
||||
|
||||
/// A type alias for the children iterator closure type.
|
||||
pub type ChildrenClosure<BlockCipher> =
|
||||
fn((usize, (Box<BlockCipher>, TableIndex, BytesPerChild))) -> AesCtrGenerator<BlockCipher>;
|
||||
|
||||
/// A type alias for the children iterator type.
|
||||
pub type ChildrenIterator<BlockCipher> = std::iter::Map<
|
||||
std::iter::Zip<
|
||||
std::ops::Range<usize>,
|
||||
std::iter::Repeat<(Box<BlockCipher>, TableIndex, BytesPerChild)>,
|
||||
>,
|
||||
ChildrenClosure<BlockCipher>,
|
||||
>;
|
||||
|
||||
/// A type implementing the `RandomGenerator` api using the AES block cipher in counter mode.
|
||||
#[derive(Clone)]
|
||||
pub struct AesCtrGenerator<BlockCipher: AesBlockCipher> {
|
||||
// The block cipher used in the background
|
||||
pub(crate) block_cipher: Box<BlockCipher>,
|
||||
// The state corresponding to the latest outputted byte.
|
||||
pub(crate) state: State,
|
||||
// The last legal index. This makes bound check faster.
|
||||
pub(crate) last: TableIndex,
|
||||
// The buffer containing the current batch of aes calls.
|
||||
pub(crate) buffer: [u8; BYTES_PER_BATCH],
|
||||
}
|
||||
|
||||
#[allow(unused)] // to please clippy when tests are not activated
|
||||
impl<BlockCipher: AesBlockCipher> AesCtrGenerator<BlockCipher> {
|
||||
/// Generates a new csprng.
|
||||
///
|
||||
/// Note :
|
||||
/// ------
|
||||
///
|
||||
/// The `start_index` given as input, points to the first byte that will be outputted by the
|
||||
/// generator. If not given, this one is automatically set to the second table index. The
|
||||
/// first table index is not used to prevent an edge case from happening: since `state` is
|
||||
/// supposed to contain the index of the previous byte, the initial value must be decremented.
|
||||
/// Using the second value prevents wrapping to the max index, which would make the bound
|
||||
/// checking fail.
|
||||
///
|
||||
/// The `bound_index` given as input, points to the first byte that can __not__ be legally
|
||||
/// outputted by the generator. If not given, the bound is automatically set to the last
|
||||
/// table index.
|
||||
pub fn new(
|
||||
key: AesKey,
|
||||
start_index: Option<TableIndex>,
|
||||
bound_index: Option<TableIndex>,
|
||||
) -> AesCtrGenerator<BlockCipher> {
|
||||
AesCtrGenerator::from_block_cipher(
|
||||
Box::new(BlockCipher::new(key)),
|
||||
start_index.unwrap_or(TableIndex::SECOND),
|
||||
bound_index.unwrap_or(TableIndex::LAST),
|
||||
)
|
||||
}
|
||||
|
||||
/// Generates a csprng from an existing block cipher.
|
||||
pub fn from_block_cipher(
|
||||
block_cipher: Box<BlockCipher>,
|
||||
start_index: TableIndex,
|
||||
bound_index: TableIndex,
|
||||
) -> AesCtrGenerator<BlockCipher> {
|
||||
assert!(start_index < bound_index);
|
||||
let last = bound_index.decremented();
|
||||
let buffer = [0u8; BYTES_PER_BATCH];
|
||||
let state = State::new(start_index);
|
||||
AesCtrGenerator {
|
||||
block_cipher,
|
||||
state,
|
||||
last,
|
||||
buffer,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the table index related to the previous random byte.
|
||||
pub fn table_index(&self) -> TableIndex {
|
||||
self.state.table_index()
|
||||
}
|
||||
|
||||
/// Returns the bound of the generator if any.
|
||||
///
|
||||
/// The bound is the table index of the first byte that can not be outputted by the generator.
|
||||
pub fn get_bound(&self) -> TableIndex {
|
||||
self.last.incremented()
|
||||
}
|
||||
|
||||
/// Returns whether the generator is bounded or not.
|
||||
pub fn is_bounded(&self) -> bool {
|
||||
self.get_bound() != TableIndex::LAST
|
||||
}
|
||||
|
||||
/// Computes the number of bytes that can still be outputted by the generator.
|
||||
///
|
||||
/// Note :
|
||||
/// ------
|
||||
///
|
||||
/// Note that `ByteCount` uses the `u128` datatype to store the byte count. Unfortunately, the
|
||||
/// number of remaining bytes is in ⟦0;2¹³² -1⟧. When the number is greater than 2¹²⁸ - 1,
|
||||
/// we saturate the count at 2¹²⁸ - 1.
|
||||
pub fn remaining_bytes(&self) -> ByteCount {
|
||||
TableIndex::distance(&self.last, &self.state.table_index()).unwrap()
|
||||
}
|
||||
|
||||
/// Outputs the next random byte.
|
||||
pub fn generate_next(&mut self) -> u8 {
|
||||
self.next()
|
||||
.expect("Tried to generate a byte after the bound.")
|
||||
}
|
||||
|
||||
/// Tries to fork the current generator into `n_child` generators each able to output
|
||||
/// `child_bytes` random bytes.
|
||||
pub fn try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<ChildrenIterator<BlockCipher>, ForkError> {
|
||||
if n_children.0 == 0 {
|
||||
return Err(ForkError::ZeroChildrenCount);
|
||||
}
|
||||
if n_bytes.0 == 0 {
|
||||
return Err(ForkError::ZeroBytesPerChild);
|
||||
}
|
||||
if !self.is_fork_in_bound(n_children, n_bytes) {
|
||||
return Err(ForkError::ForkTooLarge);
|
||||
}
|
||||
|
||||
// The state currently stored in the parent generator points to the table index of the last
|
||||
// generated byte. The first index to be generated is the next one:
|
||||
let first_index = self.state.table_index().incremented();
|
||||
let output = (0..n_children.0)
|
||||
.zip(std::iter::repeat((
|
||||
self.block_cipher.clone(),
|
||||
first_index,
|
||||
n_bytes,
|
||||
)))
|
||||
.map(
|
||||
// This map is a little weird because we need to cast the closure to a fn pointer
|
||||
// that matches the signature of `ChildrenIterator<BlockCipher>`.
|
||||
// Unfortunately, the compiler does not manage to coerce this one
|
||||
// automatically.
|
||||
(|(i, (block_cipher, first_index, n_bytes))| {
|
||||
// The first index to be outputted by the child is the `first_index` shifted by
|
||||
// the proper amount of `child_bytes`.
|
||||
let child_first_index = first_index.increased(n_bytes.0 * i);
|
||||
// The bound of the child is the first index of its next sibling.
|
||||
let child_bound_index = first_index.increased(n_bytes.0 * (i + 1));
|
||||
AesCtrGenerator::from_block_cipher(
|
||||
block_cipher,
|
||||
child_first_index,
|
||||
child_bound_index,
|
||||
)
|
||||
}) as ChildrenClosure<BlockCipher>,
|
||||
);
|
||||
// The parent next index is the bound of the last child.
|
||||
let next_index = first_index.increased(n_bytes.0 * n_children.0);
|
||||
self.state = State::new(next_index);
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
pub(crate) fn is_fork_in_bound(
|
||||
&self,
|
||||
n_child: ChildrenCount,
|
||||
child_bytes: BytesPerChild,
|
||||
) -> bool {
|
||||
let mut end = self.state.table_index();
|
||||
end.increase(n_child.0 * child_bytes.0);
|
||||
end <= self.last
|
||||
}
|
||||
}
|
||||
|
||||
impl<BlockCipher: AesBlockCipher> Iterator for AesCtrGenerator<BlockCipher> {
|
||||
type Item = u8;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.state.table_index() >= self.last {
|
||||
None
|
||||
} else {
|
||||
match self.state.increment() {
|
||||
ShiftAction::OutputByte(BufferPointer(ptr)) => Some(self.buffer[ptr]),
|
||||
ShiftAction::RefreshBatchAndOutputByte(aes_index, BufferPointer(ptr)) => {
|
||||
self.buffer = self.block_cipher.generate_batch(aes_index);
|
||||
Some(self.buffer[ptr])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod aes_ctr_generic_test {
|
||||
#![allow(unused)] // to please clippy when tests are not activated
|
||||
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::index::{AesIndex, ByteIndex};
|
||||
use crate::generators::aes_ctr::BYTES_PER_AES_CALL;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
const REPEATS: usize = 1_000_000;
|
||||
|
||||
pub fn any_table_index() -> impl Iterator<Item = TableIndex> {
|
||||
std::iter::repeat_with(|| {
|
||||
TableIndex::new(
|
||||
AesIndex(thread_rng().gen()),
|
||||
ByteIndex(thread_rng().gen::<usize>() % BYTES_PER_AES_CALL),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn any_usize() -> impl Iterator<Item = usize> {
|
||||
std::iter::repeat_with(|| thread_rng().gen())
|
||||
}
|
||||
|
||||
pub fn any_children_count() -> impl Iterator<Item = ChildrenCount> {
|
||||
std::iter::repeat_with(|| ChildrenCount(thread_rng().gen::<usize>() % 2048 + 1))
|
||||
}
|
||||
|
||||
pub fn any_bytes_per_child() -> impl Iterator<Item = BytesPerChild> {
|
||||
std::iter::repeat_with(|| BytesPerChild(thread_rng().gen::<usize>() % 2048 + 1))
|
||||
}
|
||||
|
||||
pub fn any_key() -> impl Iterator<Item = AesKey> {
|
||||
std::iter::repeat_with(|| AesKey(thread_rng().gen()))
|
||||
}
|
||||
|
||||
/// Output a valid fork:
|
||||
/// a table index t,
|
||||
/// a number of children nc,
|
||||
/// a number of bytes per children nb
|
||||
/// and a positive integer i such that:
|
||||
/// increase(t, nc*nb+i) < MAX with MAX the largest table index.
|
||||
///
|
||||
/// Put differently, if we initialize a parent generator at t and fork it with (nc, nb), our
|
||||
/// parent generator current index gets shifted to an index, distant of at least i bytes of
|
||||
/// the max index.
|
||||
pub fn any_valid_fork(
|
||||
) -> impl Iterator<Item = (TableIndex, ChildrenCount, BytesPerChild, usize)> {
|
||||
any_table_index()
|
||||
.zip(any_children_count())
|
||||
.zip(any_bytes_per_child())
|
||||
.zip(any_usize())
|
||||
.map(|(((t, nc), nb), i)| (t, nc, nb, i))
|
||||
.filter(|(t, nc, nb, i)| {
|
||||
TableIndex::distance(&TableIndex::LAST, t).unwrap().0 > (nc.0 * nb.0 + i) as u128
|
||||
})
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, the table index of the first child is the same as the table index of
|
||||
/// the parent before the fork.
|
||||
pub fn prop_fork_first_state_table_index<G: AesBlockCipher>() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let original_generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
let mut forked_generator = original_generator.clone();
|
||||
let first_child = forked_generator.try_fork(nc, nb).unwrap().next().unwrap();
|
||||
assert_eq!(original_generator.table_index(), first_child.table_index());
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, the table index of the first byte outputted by the parent after the
|
||||
/// fork, is the bound of the last child of the fork.
|
||||
pub fn prop_fork_last_bound_table_index<G: AesBlockCipher>() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let mut parent_generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
let last_child = parent_generator.try_fork(nc, nb).unwrap().last().unwrap();
|
||||
assert_eq!(
|
||||
parent_generator.table_index().incremented(),
|
||||
last_child.get_bound()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, the bound of the parent does not change.
|
||||
pub fn prop_fork_parent_bound_table_index<G: AesBlockCipher>() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let original_generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
let mut forked_generator = original_generator.clone();
|
||||
forked_generator.try_fork(nc, nb).unwrap().last().unwrap();
|
||||
assert_eq!(original_generator.get_bound(), forked_generator.get_bound());
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, the parent table index is increased of the number of children
|
||||
/// multiplied by the number of bytes per child.
|
||||
pub fn prop_fork_parent_state_table_index<G: AesBlockCipher>() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let original_generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
let mut forked_generator = original_generator.clone();
|
||||
forked_generator.try_fork(nc, nb).unwrap().last().unwrap();
|
||||
assert_eq!(
|
||||
forked_generator.table_index(),
|
||||
// Decrement accounts for the fact that the table index stored is the previous one
|
||||
t.increased(nc.0 * nb.0).decremented()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, the bytes outputted by the children in the fork order form the same
|
||||
/// sequence the parent would have had yielded no fork had happened.
|
||||
pub fn prop_fork<G: AesBlockCipher>() {
|
||||
for _ in 0..1000 {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let bytes_to_go = nc.0 * nb.0;
|
||||
let original_generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
let mut forked_generator = original_generator.clone();
|
||||
let initial_output: Vec<u8> = original_generator.take(bytes_to_go).collect();
|
||||
let forked_output: Vec<u8> = forked_generator
|
||||
.try_fork(nc, nb)
|
||||
.unwrap()
|
||||
.flat_map(|child| child.collect::<Vec<_>>())
|
||||
.collect();
|
||||
assert_eq!(initial_output, forked_output);
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, all children got a number of remaining bytes equals to the number of
|
||||
/// bytes per child given as fork input.
|
||||
pub fn prop_fork_children_remaining_bytes<G: AesBlockCipher>() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let mut generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
assert!(generator
|
||||
.try_fork(nc, nb)
|
||||
.unwrap()
|
||||
.all(|c| c.remaining_bytes().0 == nb.0 as u128));
|
||||
}
|
||||
}
|
||||
|
||||
/// Check the property:
|
||||
/// On a valid fork, the number of remaining bybtes of the parent is reduced by the number
|
||||
/// of children multiplied by the number of bytes per child.
|
||||
pub fn prop_fork_parent_remaining_bytes<G: AesBlockCipher>() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, nc, nb, i) = any_valid_fork().next().unwrap();
|
||||
let k = any_key().next().unwrap();
|
||||
let bytes_to_go = nc.0 * nb.0;
|
||||
let mut generator =
|
||||
AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
|
||||
let before_remaining_bytes = generator.remaining_bytes();
|
||||
let _ = generator.try_fork(nc, nb).unwrap();
|
||||
let after_remaining_bytes = generator.remaining_bytes();
|
||||
assert_eq!(
|
||||
before_remaining_bytes.0 - after_remaining_bytes.0,
|
||||
bytes_to_go as u128
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
389
concrete-csprng/src/generators/aes_ctr/index.rs
Normal file
389
concrete-csprng/src/generators/aes_ctr/index.rs
Normal file
@@ -0,0 +1,389 @@
|
||||
use crate::generators::aes_ctr::BYTES_PER_AES_CALL;
use crate::generators::ByteCount;
use std::cmp::Ordering;

/// A structure representing an [aes index](#coarse-grained-pseudo-random-table-lookup).
///
/// Identifies one 128-bit output of the AES-CTR pseudo-random table, i.e. one counter value.
#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct AesIndex(pub u128);

/// A structure representing a [byte index](#fine-grained-pseudo-random-table-lookup).
///
/// Selects one byte (in `0..BYTES_PER_AES_CALL`) inside a single 128-bit aes output.
#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]
pub struct ByteIndex(pub usize);

/// A structure representing a [table index](#fine-grained-pseudo-random-table-lookup)
///
/// Pairs an `AesIndex` with a `ByteIndex` to address any single byte of the pseudo-random
/// table. Ordering is implemented manually below (aes index first, then byte index).
#[derive(Clone, Copy, Debug)]
pub struct TableIndex {
    pub(crate) aes_index: AesIndex,
    pub(crate) byte_index: ByteIndex,
}
|
||||
|
||||
impl TableIndex {
|
||||
/// The first table index.
|
||||
pub const FIRST: TableIndex = TableIndex {
|
||||
aes_index: AesIndex(0),
|
||||
byte_index: ByteIndex(0),
|
||||
};
|
||||
|
||||
/// The second table index.
|
||||
pub const SECOND: TableIndex = TableIndex {
|
||||
aes_index: AesIndex(0),
|
||||
byte_index: ByteIndex(1),
|
||||
};
|
||||
|
||||
/// The last table index.
|
||||
pub const LAST: TableIndex = TableIndex {
|
||||
aes_index: AesIndex(u128::MAX),
|
||||
byte_index: ByteIndex(BYTES_PER_AES_CALL - 1),
|
||||
};
|
||||
|
||||
/// Creates a table index from an aes index and a byte index.
|
||||
#[allow(unused)] // to please clippy when tests are not activated
|
||||
pub fn new(aes_index: AesIndex, byte_index: ByteIndex) -> Self {
|
||||
assert!(byte_index.0 < BYTES_PER_AES_CALL);
|
||||
TableIndex {
|
||||
aes_index,
|
||||
byte_index,
|
||||
}
|
||||
}
|
||||
|
||||
/// Shifts the table index forward of `shift` bytes.
|
||||
pub fn increase(&mut self, shift: usize) {
|
||||
// Compute full shifts to avoid overflows
|
||||
let full_aes_shifts = shift / BYTES_PER_AES_CALL;
|
||||
let shift_remainder = shift % BYTES_PER_AES_CALL;
|
||||
|
||||
// Get the additional shift if any
|
||||
let new_byte_index = self.byte_index.0 + shift_remainder;
|
||||
let full_aes_shifts = full_aes_shifts + new_byte_index / BYTES_PER_AES_CALL;
|
||||
|
||||
// Store the reaminder in the byte index
|
||||
self.byte_index.0 = new_byte_index % BYTES_PER_AES_CALL;
|
||||
|
||||
self.aes_index.0 = self.aes_index.0.wrapping_add(full_aes_shifts as u128);
|
||||
}
|
||||
|
||||
/// Shifts the table index backward of `shift` bytes.
|
||||
pub fn decrease(&mut self, shift: usize) {
|
||||
let remainder = shift % BYTES_PER_AES_CALL;
|
||||
if remainder <= self.byte_index.0 {
|
||||
self.aes_index.0 = self
|
||||
.aes_index
|
||||
.0
|
||||
.wrapping_sub((shift / BYTES_PER_AES_CALL) as u128);
|
||||
self.byte_index.0 -= remainder;
|
||||
} else {
|
||||
self.aes_index.0 = self
|
||||
.aes_index
|
||||
.0
|
||||
.wrapping_sub((shift / BYTES_PER_AES_CALL) as u128 + 1);
|
||||
self.byte_index.0 += BYTES_PER_AES_CALL - remainder;
|
||||
}
|
||||
}
|
||||
|
||||
/// Shifts the table index forward of one byte.
|
||||
pub fn increment(&mut self) {
|
||||
self.increase(1)
|
||||
}
|
||||
|
||||
/// Shifts the table index backward of one byte.
|
||||
pub fn decrement(&mut self) {
|
||||
self.decrease(1)
|
||||
}
|
||||
|
||||
/// Returns the table index shifted forward by `shift` bytes.
|
||||
pub fn increased(mut self, shift: usize) -> Self {
|
||||
self.increase(shift);
|
||||
self
|
||||
}
|
||||
|
||||
/// Returns the table index shifted backward by `shift` bytes.
|
||||
#[allow(unused)] // to please clippy when tests are not activated
|
||||
pub fn decreased(mut self, shift: usize) -> Self {
|
||||
self.decrease(shift);
|
||||
self
|
||||
}
|
||||
|
||||
/// Returns the table index to the next byte.
|
||||
pub fn incremented(mut self) -> Self {
|
||||
self.increment();
|
||||
self
|
||||
}
|
||||
|
||||
/// Returns the table index to the previous byte.
|
||||
pub fn decremented(mut self) -> Self {
|
||||
self.decrement();
|
||||
self
|
||||
}
|
||||
|
||||
/// Returns the distance between two table indices in bytes.
|
||||
///
|
||||
/// Note:
|
||||
/// -----
|
||||
///
|
||||
/// This method assumes that the `larger` input is, well, larger than the `smaller` input. If
|
||||
/// this is not the case, the method returns `None`. Also, note that `ByteCount` uses the
|
||||
/// `u128` datatype to store the byte count. Unfortunately, the number of bytes between two
|
||||
/// table indices is in ⟦0;2¹³² -1⟧. When the distance is greater than 2¹²⁸ - 1, we saturate
|
||||
/// the count at 2¹²⁸ - 1.
|
||||
pub fn distance(larger: &Self, smaller: &Self) -> Option<ByteCount> {
|
||||
match std::cmp::Ord::cmp(larger, smaller) {
|
||||
Ordering::Less => None,
|
||||
Ordering::Equal => Some(ByteCount(0)),
|
||||
Ordering::Greater => {
|
||||
let mut result = larger.aes_index.0 - smaller.aes_index.0;
|
||||
result = result.saturating_mul(BYTES_PER_AES_CALL as u128);
|
||||
result = result.saturating_add(larger.byte_index.0 as u128);
|
||||
result = result.saturating_sub(smaller.byte_index.0 as u128);
|
||||
Some(ByteCount(result))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for TableIndex {}

impl PartialEq<Self> for TableIndex {
    // Defined through `partial_cmp` so equality and ordering can never disagree.
    fn eq(&self, other: &Self) -> bool {
        matches!(self.partial_cmp(other), Some(Ordering::Equal))
    }
}

impl PartialOrd<Self> for TableIndex {
    // Total order: simply delegate to `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for TableIndex {
    // Lexicographic order: by aes index first, then by byte index.
    fn cmp(&self, other: &Self) -> Ordering {
        match self.aes_index.cmp(&other.aes_index) {
            Ordering::Equal => self.byte_index.cmp(&other.byte_index),
            other => other,
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use super::*;
    use rand::{thread_rng, Rng};

    // Number of repetitions for each randomized property test.
    const REPEATS: usize = 1_000_000;

    // Yields uniformly random, always-valid table indices.
    fn any_table_index() -> impl Iterator<Item = TableIndex> {
        std::iter::repeat_with(|| {
            TableIndex::new(
                AesIndex(thread_rng().gen()),
                ByteIndex(thread_rng().gen::<usize>() % BYTES_PER_AES_CALL),
            )
        })
    }

    // Yields uniformly random usize values.
    fn any_usize() -> impl Iterator<Item = usize> {
        std::iter::repeat_with(|| thread_rng().gen())
    }

    #[test]
    #[should_panic]
    /// Verifies that the constructor of `TableIndex` panics when the byte index is too large.
    fn test_table_index_new_panic() {
        // 144 is out of the valid range 0..BYTES_PER_AES_CALL.
        TableIndex::new(AesIndex(12), ByteIndex(144));
    }

    #[test]
    /// Verifies that the `TableIndex` wraps nicely with predecessor
    fn test_table_index_predecessor_edge() {
        assert_eq!(TableIndex::FIRST.decremented(), TableIndex::LAST);
    }

    #[test]
    /// Verifies that the `TableIndex` wraps nicely with successor
    fn test_table_index_successor_edge() {
        assert_eq!(TableIndex::LAST.incremented(), TableIndex::FIRST);
    }

    #[test]
    /// Check that the table index distance saturates nicely.
    fn prop_table_index_distance_saturates() {
        // The true distance here exceeds u128: expect saturation at u128::MAX.
        assert_eq!(
            TableIndex::distance(&TableIndex::LAST, &TableIndex::FIRST)
                .unwrap()
                .0,
            u128::MAX
        )
    }

    #[test]
    /// Check the property:
    ///     For all table indices t,
    ///         distance(t, t) = Some(0).
    fn prop_table_index_distance_zero() {
        for _ in 0..REPEATS {
            let t = any_table_index().next().unwrap();
            assert_eq!(TableIndex::distance(&t, &t), Some(ByteCount(0)));
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t1, t2 such that t1 < t2,
    ///         distance(t1, t2) = None.
    fn prop_table_index_distance_wrong_order_none() {
        for _ in 0..REPEATS {
            // Draw random index pairs until one is strictly ordered the "wrong" way.
            let (t1, t2) = any_table_index()
                .zip(any_table_index())
                .find(|(t1, t2)| t1 < t2)
                .unwrap();
            assert_eq!(TableIndex::distance(&t1, &t2), None);
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t1, t2 such that t1 > t2,
    ///         distance(t1, t2) = Some(v) where v is strictly positive.
    fn prop_table_index_distance_some_positive() {
        for _ in 0..REPEATS {
            let (t1, t2) = any_table_index()
                .zip(any_table_index())
                .find(|(t1, t2)| t1 > t2)
                .unwrap();
            assert!(matches!(TableIndex::distance(&t1, &t2), Some(ByteCount(v)) if v > 0));
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t, positive i such that i < distance(MAX, t) with MAX the
    ///     largest table index,
    ///         distance(t.increased(i), t) = Some(i).
    fn prop_table_index_distance_increase() {
        for _ in 0..REPEATS {
            // Restrict the shift so that t.increased(inc) cannot wrap past LAST.
            let (t, inc) = any_table_index()
                .zip(any_usize())
                .find(|(t, inc)| {
                    (*inc as u128) < TableIndex::distance(&TableIndex::LAST, t).unwrap().0
                })
                .unwrap();
            assert_eq!(
                TableIndex::distance(&t.increased(inc), &t).unwrap().0 as usize,
                inc
            );
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t, t =? t = true.
    fn prop_table_index_equality() {
        for _ in 0..REPEATS {
            let t = any_table_index().next().unwrap();
            assert_eq!(
                std::cmp::PartialOrd::partial_cmp(&t, &t),
                Some(std::cmp::Ordering::Equal)
            );
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t, positive i such that i < distance(MAX, t) with MAX the
    ///     largest table index,
    ///         t.increased(i) >? t = true.
    fn prop_table_index_greater() {
        for _ in 0..REPEATS {
            // Restrict the shift so that t.increased(inc) cannot wrap past LAST.
            let (t, inc) = any_table_index()
                .zip(any_usize())
                .find(|(t, inc)| {
                    (*inc as u128) < TableIndex::distance(&TableIndex::LAST, t).unwrap().0
                })
                .unwrap();
            assert_eq!(
                std::cmp::PartialOrd::partial_cmp(&t.increased(inc), &t),
                Some(std::cmp::Ordering::Greater),
            );
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t, positive i such that i < distance(t, FIRST),
    ///         t.decreased(i) <? t = true.
    fn prop_table_index_less() {
        for _ in 0..REPEATS {
            // Restrict the shift so that t.decreased(inc) cannot wrap before FIRST.
            let (t, inc) = any_table_index()
                .zip(any_usize())
                .find(|(t, inc)| {
                    (*inc as u128) < TableIndex::distance(t, &TableIndex::FIRST).unwrap().0
                })
                .unwrap();
            assert_eq!(
                std::cmp::PartialOrd::partial_cmp(&t.decreased(inc), &t),
                Some(std::cmp::Ordering::Less)
            );
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t,
    ///         successor(predecessor(t)) = t.
    fn prop_table_index_decrement_increment() {
        for _ in 0..REPEATS {
            let t = any_table_index().next().unwrap();
            assert_eq!(t.decremented().incremented(), t);
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t,
    ///         predecessor(successor(t)) = t.
    fn prop_table_index_increment_decrement() {
        for _ in 0..REPEATS {
            let t = any_table_index().next().unwrap();
            assert_eq!(t.incremented().decremented(), t);
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t, positive integer i,
    ///         increase(decrease(t, i), i) = t.
    fn prop_table_index_increase_decrease() {
        for _ in 0..REPEATS {
            let (t, i) = any_table_index().zip(any_usize()).next().unwrap();
            assert_eq!(t.increased(i).decreased(i), t);
        }
    }

    #[test]
    /// Check the property:
    ///     For all table indices t, positive integer i,
    ///         decrease(increase(t, i), i) = t.
    fn prop_table_index_decrease_increase() {
        for _ in 0..REPEATS {
            let (t, i) = any_table_index().zip(any_usize()).next().unwrap();
            assert_eq!(t.decreased(i).increased(i), t);
        }
    }

    #[test]
    /// Check that a big increase does not overflow
    fn prop_table_increase_max_no_overflow() {
        let first = TableIndex::FIRST;
        // Increase so that ByteIndex is at 1usize
        let second = first.increased(1);

        // Now increase by usize::MAX, as the underlying byte index stores a usize this may overflow
        // depending on implementation, ensure it does not overflow
        let big_increase = second.increased(usize::MAX);
        // 1 + usize::MAX bytes total are consumed; compute the expected aes-index advance.
        let total_full_aes_shifts = (1u128 + usize::MAX as u128) / BYTES_PER_AES_CALL as u128;

        assert_eq!(
            big_increase,
            TableIndex::new(AesIndex(total_full_aes_shifts), ByteIndex(0))
        );
    }
}
|
||||
223
concrete-csprng/src/generators/aes_ctr/mod.rs
Normal file
223
concrete-csprng/src/generators/aes_ctr/mod.rs
Normal file
@@ -0,0 +1,223 @@
|
||||
//! A module implementing the random generator api with batched aes calls.
|
||||
//!
|
||||
//! This module provides a generic [`AesCtrGenerator`] structure which implements the
|
||||
//! [`super::RandomGenerator`] api using the AES block cipher in counter mode. That is, the
|
||||
//! generator holds a state (i.e. counter) which is incremented iteratively, to produce the stream
|
||||
//! of random values:
|
||||
//! ```ascii
|
||||
//! state=0 state=1 state=2
|
||||
//! ╔══↧══╗ ╔══↧══╗ ╔══↧══╗
|
||||
//! key ↦ AES ║ key ↦ AES ║ key ↦ AES ║ ...
|
||||
//! ╚══↧══╝ ╚══↧══╝ ╚══↧══╝
|
||||
//! output0 output1 output2
|
||||
//!
|
||||
//! t=0 t=1 t=2
|
||||
//! ```
|
||||
//!
|
||||
//! The [`AesCtrGenerator`] structure is generic over the AES block ciphers, which are
|
||||
//! represented by the [`AesBlockCipher`] trait. Consequently, implementers only need to implement
|
||||
//! the `AesBlockCipher` trait, to benefit from the whole api of the `AesCtrGenerator` structure.
|
||||
//!
|
||||
//! In the following section, we give details on the implementation of this generic generator.
|
||||
//!
|
||||
//! Coarse-grained pseudo-random lookup table
|
||||
//! =========================================
|
||||
//!
|
||||
//! To generate random values, we use the AES block cipher in counter mode. If we denote f the aes
|
||||
//! encryption function, we have:
|
||||
//! ```ascii
|
||||
//! f: ⟦0;2¹²⁸ -1⟧ X ⟦0;2¹²⁸ -1⟧ ↦ ⟦0;2¹²⁸ -1⟧
|
||||
//! f(secret_key, input) ↦ output
|
||||
//! ```
|
||||
|
||||
//! If we fix the secret key to a value k, we have a function fₖ from ⟦0;2¹²⁸ -1⟧ to ⟦0;2¹²⁸-1⟧,
|
||||
//! transforming the state of the counter into a pseudo random value. Essentially, this fₖ
//! function can be considered as the following lookup table, containing 2¹²⁸ pseudo-random
//! values:
|
||||
//! ```ascii
|
||||
//! ╭──────────────┬──────────────┬─────┬──────────────╮
|
||||
//! │ 0 │ 1 │ │ 2¹²⁸ -1 │
|
||||
//! ├──────────────┼──────────────┼─────┼──────────────┤
|
||||
//! │ fₖ(0) │ fₖ(1) │ │ fₖ(2¹²⁸ -1) │
|
||||
//! ╔═══════↧══════╦═══════↧══════╦═════╦═══════↧══════╗
|
||||
//! ║┏━━━━━━━━━━━━┓║┏━━━━━━━━━━━━┓║ ║┏━━━━━━━━━━━━┓║
|
||||
//! ║┃ u128 ┃║┃ u128 ┃║ ... ║┃ u128 ┃║
|
||||
//! ║┗━━━━━━━━━━━━┛║┗━━━━━━━━━━━━┛║ ║┗━━━━━━━━━━━━┛║
|
||||
//! ╚══════════════╩══════════════╩═════╩══════════════╝
|
||||
//! ```
|
||||
//!
|
||||
//! An input to the fₖ function is called an _aes index_ (also called state or counter in the
|
||||
//! standards) of the pseudo-random table. The [`AesIndex`] structure defined in this module
|
||||
//! represents such an index in the code.
|
||||
//!
|
||||
//! Fine-grained pseudo-random table lookup
|
||||
//! =======================================
|
||||
//!
|
||||
//! Since we want to deliver the pseudo-random bytes one by one, we have to come with a finer
|
||||
//! grained indexing. Fortunately, each `u128` value outputted by fₖ can be seen as a table of 16
|
||||
//! `u8`:
|
||||
//! ```ascii
|
||||
//! ╭──────────────┬──────────────┬─────┬──────────────╮
|
||||
//! │ 0 │ 1 │ │ 2¹²⁸ -1 │
|
||||
//! ├──────────────┼──────────────┼─────┼──────────────┤
|
||||
//! │ fₖ(0) │ fₖ(1) │ │ fₖ(2¹²⁸ -1) │
|
||||
//! ╔═══════↧══════╦═══════↧══════╦═════╦═══════↧══════╗
|
||||
//! ║┏━━━━━━━━━━━━┓║┏━━━━━━━━━━━━┓║ ║┏━━━━━━━━━━━━┓║
|
||||
//! ║┃ u128 ┃║┃ u128 ┃║ ║┃ u128 ┃║
|
||||
//! ║┣━━┯━━┯━━━┯━━┫║┣━━┯━━┯━━━┯━━┫║ ... ║┣━━┯━━┯━━━┯━━┫║
|
||||
//! ║┃u8│u8│...│u8┃║┃u8│u8│...│u8┃║ ║┃u8│u8│...│u8┃║
|
||||
//! ║┗━━┷━━┷━━━┷━━┛║┗━━┷━━┷━━━┷━━┛║ ║┗━━┷━━┷━━━┷━━┛║
|
||||
//! ╚══════════════╩══════════════╩═════╩══════════════╝
|
||||
//! ```
|
||||
//!
|
||||
//! We introduce a second function to select a chunk of 8 bits:
|
||||
//! ```ascii
|
||||
//! g: ⟦0;2¹²⁸ -1⟧ X ⟦0;15⟧ ↦ ⟦0;2⁸ -1⟧
|
||||
//! g(big_int, index) ↦ byte
|
||||
//! ```
|
||||
//!
|
||||
//! If we fix the `u128` value to a value e, we have a function gₑ from ⟦0;15⟧ to ⟦0;2⁸ -1⟧
|
||||
//! transforming an index into a pseudo-random byte:
|
||||
//! ```ascii
|
||||
//! ┏━━━━━━━━┯━━━━━━━━┯━━━┯━━━━━━━━┓
|
||||
//! ┃ u8 │ u8 │...│ u8 ┃
|
||||
//! ┗━━━━━━━━┷━━━━━━━━┷━━━┷━━━━━━━━┛
|
||||
//! │ gₑ(0) │ gₑ(1) │ │ gₑ(15) │
|
||||
//! ╰────────┴─────-──┴───┴────────╯
|
||||
//! ```
|
||||
//!
|
||||
//! We call this input to the gₑ function, a _byte index_ of the pseudo-random table. The
|
||||
//! [`ByteIndex`] structure defined in this module represents such an index in the code.
|
||||
//!
|
||||
//! By using both the g and the fₖ functions, we can define a new function l which allows to index
|
||||
//! any byte of the pseudo-random table:
|
||||
//! ```ascii
|
||||
//! l: ⟦0;2¹²⁸ -1⟧ X ⟦0;15⟧ ↦ ⟦0;2⁸ -1⟧
|
||||
//! l(aes_index, byte_index) ↦ g(fₖ(aes_index), byte_index)
|
||||
//! ```
|
||||
//!
|
||||
//! In this sense, any member of ⟦0;2¹²⁸ -1⟧ X ⟦0;15⟧ uniquely defines a byte in this pseudo-random
|
||||
//! table:
|
||||
//! ```ascii
|
||||
//! e = fₖ(a)
|
||||
//! ╔══════════════╦═══════↧══════╦═════╦══════════════╗
|
||||
//! ║┏━━━━━━━━━━━━┓║┏━━━━━━━━━━━━┓║ ║┏━━━━━━━━━━━━┓║
|
||||
//! ║┃ u128 ┃║┃ u128 ┃║ ║┃ u128 ┃║
|
||||
//! ║┣━━┯━━┯━━━┯━━┫║┣━━┯━━┯━━━┯━━┫║ ... ║┣━━┯━━┯━━━┯━━┫║
|
||||
//! ║┃u8│u8│...│u8┃║┃u8│u8│...│u8┃║ ║┃u8│u8│...│u8┃║
|
||||
//! ║┗━━┷━━┷━━━┷━━┛║┗━━┷↥━┷━━━┷━━┛║ ║┗━━┷━━┷━━━┷━━┛║
|
||||
//! ║ ║│ gₑ(b) │║ ║ ║
|
||||
//! ║ ║╰───-────────╯║ ║ ║
|
||||
//! ╚══════════════╩══════════════╩═════╩══════════════╝
|
||||
//! ```
|
||||
//!
|
||||
//! We call this input to the l function, a _table index_ of the pseudo-random table. The
|
||||
//! [`TableIndex`] structure defined in this module represents such an index in the code.
|
||||
//!
|
||||
//! Prngs current table index
|
||||
//! =========================
|
||||
//!
|
||||
//! When created, a prng is given an initial _table index_, denoted (a₀, b₀), which identifies the
|
||||
//! first byte of the table to be outputted by the prng. Then, each time the prng is queried for a
|
||||
//! new value, the byte corresponding to the current _table index_ is returned, and the current
|
||||
//! _table index_ is incremented:
|
||||
//! ```ascii
|
||||
//! e = fₖ(a₀) e = fₖ(a₁)
|
||||
//! ╔═════↧═════╦═══════════╦═════╦═══════════╗ ╔═══════════╦═════↧═════╦═════╦═══════════╗
|
||||
//! ║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║ ... ║┏━┯━┯━━━┯━┓║ ║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║ ... ║┏━┯━┯━━━┯━┓║
|
||||
//! ║┃ │ │...│ ┃║┃ │ │...│ ┃║ ║┃ │ │...│ ┃║ ║┃ │ │...│ ┃║┃ │ │...│ ┃║ ║┃ │ │...│ ┃║
|
||||
//! ║┗━┷━┷━━━┷↥┛║┗━┷━┷━━━┷━┛║ ║┗━┷━┷━━━┷━┛║ → ║┗━┷━┷━━━┷━┛║┗↥┷━┷━━━┷━┛║ ║┗━┷━┷━━━┷━┛║
|
||||
//! ║│ gₑ(b₀) │║ ║ ║ ║ ║ ║│ gₑ(b₁) │║ ║ ║
|
||||
//! ║╰─────────╯║ ║ ║ ║ ║ ║╰─────────╯║ ║ ║
|
||||
//! ╚═══════════╩═══════════╩═════╩═══════════╝ ╚═══════════╩═══════════╩═════╩═══════════╝
|
||||
//! ```
|
||||
//!
|
||||
//! Prng bound
|
||||
//! ==========
|
||||
//!
|
||||
//! When created, a prng is also given a _bound_ (aₘ, bₘ), that is, a table index which it is
//! not allowed to exceed:
|
||||
//! ```ascii
|
||||
//! e = fₖ(a₀)
|
||||
//! ╔═════↧═════╦═══════════╦═════╦═══════════╗
|
||||
//! ║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║ ... ║┏━┯━┯━━━┯━┓║
|
||||
//! ║┃ │ │...│ ┃║┃ │╳│...│╳┃║ ║┃╳│╳│...│╳┃║
|
||||
//! ║┗━┷━┷━━━┷↥┛║┗━┷━┷━━━┷━┛║ ║┗━┷━┷━━━┷━┛║ The current byte can be returned.
|
||||
//! ║│ gₑ(b₀) │║ ║ ║ ║
|
||||
//! ║╰─────────╯║ ║ ║ ║
|
||||
//! ╚═══════════╩═══════════╩═════╩═══════════╝
|
||||
//!
|
||||
//! e = fₖ(aₘ)
|
||||
//! ╔═══════════╦═════↧═════╦═════╦═══════════╗
|
||||
//! ║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║ ... ║┏━┯━┯━━━┯━┓║
|
||||
//! ║┃ │ │...│ ┃║┃ │╳│...│╳┃║ ║┃╳│╳│...│╳┃║ The table index reached the bound,
|
||||
//! ║┗━┷━┷━━━┷━┛║┗━┷↥┷━━━┷━┛║ ║┗━┷━┷━━━┷━┛║ the current byte can not be
|
||||
//! ║ ║│ gₑ(bₘ) │║ ║ ║ returned.
|
||||
//! ║ ║╰─────────╯║ ║ ║
|
||||
//! ╚═══════════╩═══════════╩═════╩═══════════╝
|
||||
//! ```
|
||||
//!
|
||||
//! Buffering
|
||||
//! =========
|
||||
//!
|
||||
//! Calling the aes function every time we need to output a single byte would be a huge waste of
|
||||
//! resources. In practice, we call aes 8 times in a row, for 8 successive values of aes index, and
|
||||
//! store the results in a buffer. For platforms which have a dedicated aes chip, this allows to
|
||||
//! fill the unit pipeline and reduces the amortized cost of the aes function.
|
||||
//!
|
||||
//! Together with the current table index of the prng, we also store a pointer p (initialized at
|
||||
//! p₀=b₀) to the current byte in the buffer. If we denote v the lookup function we have :
|
||||
//! ```ascii
|
||||
//! e = fₖ(a₀) Buffer(length=128)
|
||||
//! ╔═════╦═══════════╦═════↧═════╦═══════════╦═════╗ ┏━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓
|
||||
//! ║ ... ║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║ ... ║ ┃▓│▓│▓│▓│▓│▓│▓│▓│...│▓┃
|
||||
//! ║ ║┃ │ │...│ ┃║┃▓│▓│...│▓┃║┃▓│▓│...│▓┃║ ║ ┗━┷↥┷━┷━┷━┷━┷━┷━┷━━━┷━┛
|
||||
//! ║ ║┗━┷━┷━━━┷━┛║┗━┷↥┷━━━┷━┛║┗━┷━┷━━━┷━┛║ ║ │ v(p₀) │
|
||||
//! ║ ║ ║│ gₑ(b₀) │║ ║ ║ ╰─────────────────────╯
|
||||
//! ║ ║ ║╰─────────╯║ ║ ║
|
||||
//! ╚═════╩═══════════╩═══════════╩═══════════╩═════╝
|
||||
//! ```
|
||||
//!
|
||||
//! We call this input to the v function, a _buffer pointer_. The [`BufferPointer`] structure
|
||||
//! defined in this module represents such a pointer in the code.
|
||||
//!
|
||||
//! When the table index is incremented, the buffer pointer is incremented alongside:
|
||||
//! ```ascii
|
||||
//! e = fₖ(a) Buffer(length=128)
|
||||
//! ╔═════╦═══════════╦═════↧═════╦═══════════╦═════╗ ┏━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓
|
||||
//! ║ ... ║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║┏━┯━┯━━━┯━┓║ ... ║ ┃▓│▓│▓│▓│▓│▓│▓│▓│...│▓┃
|
||||
//! ║ ║┃ │ │...│ ┃║┃▓│▓│...│▓┃║┃▓│▓│...│▓┃║ ║ ┗━┷━┷↥┷━┷━┷━┷━┷━┷━━━┷━┛
|
||||
//! ║ ║┗━┷━┷━━━┷━┛║┗━┷━┷↥━━┷━┛║┗━┷━┷━━━┷━┛║ ║ │ v(p) │
|
||||
//! ║ ║ ║│ gₑ(b) │║ ║ ║ ╰─────────────────────╯
|
||||
//! ║ ║ ║╰─────────╯║ ║ ║
|
||||
//! ╚═════╩═══════════╩═══════════╩═══════════╩═════╝
|
||||
//! ```
|
||||
//!
|
||||
//! When the buffer pointer is incremented it is checked against the size of the buffer, and if
|
||||
//! necessary, a new batch of aes index values is generated.
|
||||
|
||||
pub const AES_CALLS_PER_BATCH: usize = 8;
|
||||
pub const BYTES_PER_AES_CALL: usize = 128 / 8;
|
||||
pub const BYTES_PER_BATCH: usize = BYTES_PER_AES_CALL * AES_CALLS_PER_BATCH;
|
||||
|
||||
/// A module containing structures to manage table indices.
|
||||
mod index;
|
||||
pub use index::*;
|
||||
|
||||
/// A module containing structures to manage table indices and buffer pointers together properly.
|
||||
mod states;
|
||||
pub use states::*;
|
||||
|
||||
/// A module containing an abstraction for aes block ciphers.
|
||||
mod block_cipher;
|
||||
pub use block_cipher::*;
|
||||
|
||||
/// A module containing a generic implementation of a random generator.
|
||||
mod generic;
|
||||
pub use generic::*;
|
||||
|
||||
/// A module extending `generic` to the `rayon` paradigm.
|
||||
#[cfg(feature = "parallel")]
|
||||
mod parallel;
|
||||
#[cfg(feature = "parallel")]
|
||||
pub use parallel::*;
|
||||
222
concrete-csprng/src/generators/aes_ctr/parallel.rs
Normal file
222
concrete-csprng/src/generators/aes_ctr/parallel.rs
Normal file
@@ -0,0 +1,222 @@
|
||||
use crate::generators::aes_ctr::{
    AesBlockCipher, AesCtrGenerator, ChildrenClosure, State, TableIndex,
};
use crate::generators::{BytesPerChild, ChildrenCount, ForkError};

/// A type alias for the parallel children iterator type.
///
/// Each item pairs a child rank with the data shared by all children (cloned block cipher,
/// first table index of the fork, bytes per child); the map function turns that pair into a
/// fully configured child generator.
pub type ParallelChildrenIterator<BlockCipher> = rayon::iter::Map<
    rayon::iter::Zip<
        rayon::range::Iter<usize>,
        rayon::iter::RepeatN<(Box<BlockCipher>, TableIndex, BytesPerChild)>,
    >,
    fn((usize, (Box<BlockCipher>, TableIndex, BytesPerChild))) -> AesCtrGenerator<BlockCipher>,
>;
|
||||
|
||||
impl<BlockCipher: AesBlockCipher> AesCtrGenerator<BlockCipher> {
|
||||
/// Tries to fork the current generator into `n_child` generators each able to output
|
||||
/// `child_bytes` random bytes as a parallel iterator.
|
||||
///
|
||||
/// # Notes
|
||||
///
|
||||
/// This method necessitate the "multithread" feature.
|
||||
pub fn par_try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<ParallelChildrenIterator<BlockCipher>, ForkError>
|
||||
where
|
||||
BlockCipher: Send + Sync,
|
||||
{
|
||||
use rayon::prelude::*;
|
||||
|
||||
if n_children.0 == 0 {
|
||||
return Err(ForkError::ZeroChildrenCount);
|
||||
}
|
||||
if n_bytes.0 == 0 {
|
||||
return Err(ForkError::ZeroBytesPerChild);
|
||||
}
|
||||
if !self.is_fork_in_bound(n_children, n_bytes) {
|
||||
return Err(ForkError::ForkTooLarge);
|
||||
}
|
||||
|
||||
// The state currently stored in the parent generator points to the table index of the last
|
||||
// generated byte. The first index to be generated is the next one :
|
||||
let first_index = self.state.table_index().incremented();
|
||||
let output = (0..n_children.0)
|
||||
.into_par_iter()
|
||||
.zip(rayon::iter::repeatn(
|
||||
(self.block_cipher.clone(), first_index, n_bytes),
|
||||
n_children.0,
|
||||
))
|
||||
.map(
|
||||
// This map is a little weird because we need to cast the closure to a fn pointer
|
||||
// that matches the signature of `ChildrenIterator<BlockCipher>`. Unfortunately,
|
||||
// the compiler does not manage to coerce this one automatically.
|
||||
(|(i, (block_cipher, first_index, n_bytes))| {
|
||||
// The first index to be outputted by the child is the `first_index` shifted by
|
||||
// the proper amount of `child_bytes`.
|
||||
let child_first_index = first_index.increased(n_bytes.0 * i);
|
||||
// The bound of the child is the first index of its next sibling.
|
||||
let child_bound_index = first_index.increased(n_bytes.0 * (i + 1));
|
||||
AesCtrGenerator::from_block_cipher(
|
||||
block_cipher,
|
||||
child_first_index,
|
||||
child_bound_index,
|
||||
)
|
||||
}) as ChildrenClosure<BlockCipher>,
|
||||
);
|
||||
// The parent next index is the bound of the last child.
|
||||
let next_index = first_index.increased(n_bytes.0 * n_children.0);
|
||||
self.state = State::new(next_index);
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
pub mod aes_ctr_parallel_generic_tests {
    // Generic property tests for `par_try_fork`; concrete block-cipher test modules
    // instantiate these functions with their own `G`.

    use super::*;
    use crate::generators::aes_ctr::aes_ctr_generic_test::{any_key, any_valid_fork};
    use rayon::prelude::*;

    // Number of repetitions for each randomized property test.
    const REPEATS: usize = 1_000_000;

    /// Check the property:
    ///     On a valid fork, the table index of the first child is the same as the table index of
    ///     the parent before the fork.
    pub fn prop_fork_first_state_table_index<G: AesBlockCipher>() {
        for _ in 0..REPEATS {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let original_generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            let mut forked_generator = original_generator.clone();
            let first_child = forked_generator
                .par_try_fork(nc, nb)
                .unwrap()
                .find_first(|_| true)
                .unwrap();
            assert_eq!(original_generator.table_index(), first_child.table_index());
        }
    }

    /// Check the property:
    ///     On a valid fork, the table index of the first byte outputted by the parent after the
    ///     fork, is the bound of the last child of the fork.
    pub fn prop_fork_last_bound_table_index<G: AesBlockCipher>() {
        for _ in 0..REPEATS {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let mut parent_generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            let last_child = parent_generator
                .par_try_fork(nc, nb)
                .unwrap()
                .find_last(|_| true)
                .unwrap();
            assert_eq!(
                parent_generator.table_index().incremented(),
                last_child.get_bound()
            );
        }
    }

    /// Check the property:
    ///     On a valid fork, the bound of the parent does not change.
    pub fn prop_fork_parent_bound_table_index<G: AesBlockCipher>() {
        for _ in 0..REPEATS {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let original_generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            let mut forked_generator = original_generator.clone();
            // Drain the parallel iterator so the fork fully executes.
            forked_generator
                .par_try_fork(nc, nb)
                .unwrap()
                .find_last(|_| true)
                .unwrap();
            assert_eq!(original_generator.get_bound(), forked_generator.get_bound());
        }
    }

    /// Check the property:
    ///     On a valid fork, the parent table index is increased of the number of children
    ///     multiplied by the number of bytes per child.
    pub fn prop_fork_parent_state_table_index<G: AesBlockCipher>() {
        for _ in 0..REPEATS {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let original_generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            let mut forked_generator = original_generator.clone();
            // Drain the parallel iterator so the fork fully executes.
            forked_generator
                .par_try_fork(nc, nb)
                .unwrap()
                .find_last(|_| true)
                .unwrap();
            assert_eq!(
                forked_generator.table_index(),
                // Decrement accounts for the fact that the table index stored is the previous one
                t.increased(nc.0 * nb.0).decremented()
            );
        }
    }

    /// Check the property:
    ///     On a valid fork, the bytes outputted by the children in the fork order form the same
    ///     sequence the parent would have had outputted no fork had happened.
    pub fn prop_fork<G: AesBlockCipher>() {
        // Fewer repetitions than REPEATS: this property drains whole generators.
        for _ in 0..1000 {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let bytes_to_go = nc.0 * nb.0;
            let original_generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            let mut forked_generator = original_generator.clone();
            let initial_output: Vec<u8> = original_generator.take(bytes_to_go).collect();
            let forked_output: Vec<u8> = forked_generator
                .par_try_fork(nc, nb)
                .unwrap()
                .flat_map(|child| child.collect::<Vec<_>>())
                .collect();
            assert_eq!(initial_output, forked_output);
        }
    }

    /// Check the property:
    ///     On a valid fork, all children got a number of remaining bytes equals to the number of
    ///     bytes per child given as fork input.
    pub fn prop_fork_children_remaining_bytes<G: AesBlockCipher>() {
        for _ in 0..REPEATS {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let mut generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            assert!(generator
                .par_try_fork(nc, nb)
                .unwrap()
                .all(|c| c.remaining_bytes().0 == nb.0 as u128));
        }
    }

    /// Check the property:
    ///     On a valid fork, the number of remaining bytes of the parent is reduced by the
    ///     number of children multiplied by the number of bytes per child.
    pub fn prop_fork_parent_remaining_bytes<G: AesBlockCipher>() {
        for _ in 0..REPEATS {
            let (t, nc, nb, i) = any_valid_fork().next().unwrap();
            let k = any_key().next().unwrap();
            let bytes_to_go = nc.0 * nb.0;
            let mut generator =
                AesCtrGenerator::<G>::new(k, Some(t), Some(t.increased(nc.0 * nb.0 + i)));
            let before_remaining_bytes = generator.remaining_bytes();
            let _ = generator.par_try_fork(nc, nb).unwrap();
            let after_remaining_bytes = generator.remaining_bytes();
            assert_eq!(
                before_remaining_bytes.0 - after_remaining_bytes.0,
                bytes_to_go as u128
            );
        }
    }
}
|
||||
176
concrete-csprng/src/generators/aes_ctr/states.rs
Normal file
176
concrete-csprng/src/generators/aes_ctr/states.rs
Normal file
@@ -0,0 +1,176 @@
|
||||
use crate::generators::aes_ctr::index::{AesIndex, TableIndex};
|
||||
use crate::generators::aes_ctr::BYTES_PER_BATCH;
|
||||
|
||||
/// A pointer to the next byte to be outputted by the generator.
|
||||
#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]
|
||||
pub struct BufferPointer(pub usize);
|
||||
|
||||
/// A structure representing the current state of generator using batched aes-ctr approach.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct State {
|
||||
table_index: TableIndex,
|
||||
buffer_pointer: BufferPointer,
|
||||
}
|
||||
|
||||
/// A structure representing the action to be taken by the generator after shifting its state.
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum ShiftAction {
|
||||
/// Outputs the byte pointed to by the 0-th field.
|
||||
OutputByte(BufferPointer),
|
||||
/// Refresh the buffer starting from the 0-th field, and output the byte pointed to by the 0-th
|
||||
/// field.
|
||||
RefreshBatchAndOutputByte(AesIndex, BufferPointer),
|
||||
}
|
||||
|
||||
impl State {
|
||||
/// Creates a new state from the initial table index.
|
||||
///
|
||||
/// Note :
|
||||
/// ------
|
||||
///
|
||||
/// The `table_index` input, is the __first__ table index that will be outputted on the next
|
||||
/// call to `increment`. Put differently, the current table index of the newly created state
|
||||
/// is the predecessor of this one.
|
||||
pub fn new(table_index: TableIndex) -> Self {
|
||||
// We ensure that the table index is not the first one, to prevent wrapping on `decrement`,
|
||||
// and outputting `RefreshBatchAndOutputByte(AesIndex::MAX, ...)` on the first increment
|
||||
// (which would lead to loading a non continuous batch).
|
||||
assert_ne!(table_index, TableIndex::FIRST);
|
||||
State {
|
||||
// To ensure that the first outputted table index is the proper one, we decrement the
|
||||
// table index.
|
||||
table_index: table_index.decremented(),
|
||||
// To ensure that the first `ShiftAction` will be a `RefreshBatchAndOutputByte`, we set
|
||||
// the buffer to the last allowed value.
|
||||
buffer_pointer: BufferPointer(BYTES_PER_BATCH - 1),
|
||||
}
|
||||
}
|
||||
|
||||
/// Shifts the state forward of `shift` bytes.
|
||||
pub fn increase(&mut self, shift: usize) -> ShiftAction {
|
||||
self.table_index.increase(shift);
|
||||
let total_batch_index = self.buffer_pointer.0 + shift;
|
||||
if total_batch_index > BYTES_PER_BATCH - 1 {
|
||||
self.buffer_pointer.0 = self.table_index.byte_index.0;
|
||||
ShiftAction::RefreshBatchAndOutputByte(self.table_index.aes_index, self.buffer_pointer)
|
||||
} else {
|
||||
self.buffer_pointer.0 = total_batch_index;
|
||||
ShiftAction::OutputByte(self.buffer_pointer)
|
||||
}
|
||||
}
|
||||
|
||||
/// Shifts the state forward of one byte.
|
||||
pub fn increment(&mut self) -> ShiftAction {
|
||||
self.increase(1)
|
||||
}
|
||||
|
||||
/// Returns the current table index.
|
||||
pub fn table_index(&self) -> TableIndex {
|
||||
self.table_index
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for State {
|
||||
fn default() -> Self {
|
||||
State::new(TableIndex::FIRST)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::index::ByteIndex;
|
||||
use crate::generators::aes_ctr::BYTES_PER_AES_CALL;
|
||||
use rand::{thread_rng, Rng};
|
||||
|
||||
const REPEATS: usize = 1_000_000;
|
||||
|
||||
fn any_table_index() -> impl Iterator<Item = TableIndex> {
|
||||
std::iter::repeat_with(|| {
|
||||
TableIndex::new(
|
||||
AesIndex(thread_rng().gen()),
|
||||
ByteIndex(thread_rng().gen::<usize>() % BYTES_PER_AES_CALL),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn any_usize() -> impl Iterator<Item = usize> {
|
||||
std::iter::repeat_with(|| thread_rng().gen())
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Check the property:
|
||||
/// For all table indices t,
|
||||
/// State::new(t).increment() = RefreshBatchAndOutputByte(t.aes_index, t.byte_index)
|
||||
fn prop_state_new_increment() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, mut s) = any_table_index()
|
||||
.map(|t| (t, State::new(t)))
|
||||
.next()
|
||||
.unwrap();
|
||||
assert!(matches!(
|
||||
s.increment(),
|
||||
ShiftAction::RefreshBatchAndOutputByte(t_, BufferPointer(p_)) if t_ == t.aes_index && p_ == t.byte_index.0
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Check the property:
|
||||
/// For all states s, table indices t, positive integer i
|
||||
/// if s = State::new(t), then t.increased(i) = s.increased(i-1).table_index().
|
||||
fn prop_state_increase_table_index() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, mut s, i) = any_table_index()
|
||||
.zip(any_usize())
|
||||
.map(|(t, i)| (t, State::new(t), i))
|
||||
.next()
|
||||
.unwrap();
|
||||
s.increase(i);
|
||||
assert_eq!(s.table_index(), t.increased(i - 1))
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Check the property:
|
||||
/// For all table indices t, positive integer i such as t.byte_index + i < 127,
|
||||
/// if s = State::new(t), and s.increment() was executed, then
|
||||
/// s.increase(i) = OutputByte(t.byte_index + i).
|
||||
fn prop_state_increase_small() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, mut s, i) = any_table_index()
|
||||
.zip(any_usize())
|
||||
.map(|(t, i)| (t, State::new(t), i % BYTES_PER_BATCH))
|
||||
.find(|(t, _, i)| t.byte_index.0 + i < BYTES_PER_BATCH - 1)
|
||||
.unwrap();
|
||||
s.increment();
|
||||
assert!(matches!(
|
||||
s.increase(i),
|
||||
ShiftAction::OutputByte(BufferPointer(p_)) if p_ == t.byte_index.0 + i
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// Check the property:
|
||||
/// For all table indices t, positive integer i such as t.byte_index + i >= 127,
|
||||
/// if s = State::new(t), and s.increment() was executed, then
|
||||
/// s.increase(i) = RefreshBatchAndOutputByte(
|
||||
/// t.increased(i).aes_index,
|
||||
/// t.increased(i).byte_index).
|
||||
fn prop_state_increase_large() {
|
||||
for _ in 0..REPEATS {
|
||||
let (t, mut s, i) = any_table_index()
|
||||
.zip(any_usize())
|
||||
.map(|(t, i)| (t, State::new(t), i))
|
||||
.find(|(t, _, i)| t.byte_index.0 + i >= BYTES_PER_BATCH - 1)
|
||||
.unwrap();
|
||||
s.increment();
|
||||
assert!(matches!(
|
||||
s.increase(i),
|
||||
ShiftAction::RefreshBatchAndOutputByte(t_, BufferPointer(p_))
|
||||
if t_ == t.increased(i).aes_index && p_ == t.increased(i).byte_index.0
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
184
concrete-csprng/src/generators/implem/aarch64/block_cipher.rs
Normal file
184
concrete-csprng/src/generators/implem/aarch64/block_cipher.rs
Normal file
@@ -0,0 +1,184 @@
|
||||
use crate::generators::aes_ctr::{AesBlockCipher, AesIndex, AesKey, BYTES_PER_BATCH};
|
||||
use core::arch::aarch64::{
|
||||
uint8x16_t, vaeseq_u8, vaesmcq_u8, vdupq_n_u32, vdupq_n_u8, veorq_u8, vgetq_lane_u32,
|
||||
vreinterpretq_u32_u8, vreinterpretq_u8_u32,
|
||||
};
|
||||
use std::arch::is_aarch64_feature_detected;
|
||||
use std::mem::transmute;
|
||||
|
||||
const RCONS: [u32; 10] = [0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1B, 0x36];
|
||||
const NUM_WORDS_IN_KEY: usize = 4;
|
||||
const NUM_ROUNDS: usize = 10;
|
||||
const NUM_ROUND_KEYS: usize = NUM_ROUNDS + 1;
|
||||
|
||||
/// An aes block cipher implementation which uses `neon` and `aes` instructions.
|
||||
#[derive(Clone)]
|
||||
pub struct ArmAesBlockCipher {
|
||||
round_keys: [uint8x16_t; NUM_ROUND_KEYS],
|
||||
}
|
||||
|
||||
impl AesBlockCipher for ArmAesBlockCipher {
|
||||
fn new(key: AesKey) -> ArmAesBlockCipher {
|
||||
let aes_detected = is_aarch64_feature_detected!("aes");
|
||||
let neon_detected = is_aarch64_feature_detected!("neon");
|
||||
|
||||
if !(aes_detected && neon_detected) {
|
||||
panic!(
|
||||
"The ArmAesBlockCipher requires both aes and neon aarch64 CPU features.\n\
|
||||
aes feature available: {}\nneon feature available: {}\n.",
|
||||
aes_detected, neon_detected
|
||||
)
|
||||
}
|
||||
|
||||
let round_keys = unsafe { generate_round_keys(key) };
|
||||
ArmAesBlockCipher { round_keys }
|
||||
}
|
||||
|
||||
fn generate_batch(&mut self, AesIndex(aes_ctr): AesIndex) -> [u8; BYTES_PER_BATCH] {
|
||||
#[target_feature(enable = "aes,neon")]
|
||||
unsafe fn implementation(
|
||||
this: &ArmAesBlockCipher,
|
||||
AesIndex(aes_ctr): AesIndex,
|
||||
) -> [u8; BYTES_PER_BATCH] {
|
||||
let mut output = [0u8; BYTES_PER_BATCH];
|
||||
// We want 128 bytes of output, the ctr gives 128 bit message (16 bytes)
|
||||
for (i, out) in output.chunks_exact_mut(16).enumerate() {
|
||||
// Safe because we prevent the user from creating the Generator
|
||||
// on non-supported hardware
|
||||
let encrypted = encrypt(aes_ctr + (i as u128), &this.round_keys);
|
||||
out.copy_from_slice(&encrypted.to_ne_bytes());
|
||||
}
|
||||
output
|
||||
}
|
||||
// SAFETY: we checked for aes and neon availability in `Self::new`
|
||||
unsafe { implementation(self, AesIndex(aes_ctr)) }
|
||||
}
|
||||
}
|
||||
|
||||
/// Does the AES SubWord operation for the Key Expansion step
|
||||
///
|
||||
/// # SAFETY
|
||||
///
|
||||
/// You must make sure the CPU's arch is`aarch64` and has
|
||||
/// `neon` and `aes` features.
|
||||
#[inline(always)]
|
||||
unsafe fn sub_word(word: u32) -> u32 {
|
||||
let data = vreinterpretq_u8_u32(vdupq_n_u32(word));
|
||||
let zero_key = vdupq_n_u8(0u8);
|
||||
let temp = vaeseq_u8(data, zero_key);
|
||||
// vaeseq_u8 does SubBytes(ShiftRow(XOR(data, key))
|
||||
// But because we used a zero aes key,the XOR did not alter data
|
||||
// We now have temp = SubBytes(ShiftRow(data))
|
||||
|
||||
// Since in AES ShiftRow operation, the first row is not shifted
|
||||
// We can just get that one to have our SubWord(word) result
|
||||
vgetq_lane_u32::<0>(vreinterpretq_u32_u8(temp))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn uint8x16_t_to_u128(input: uint8x16_t) -> u128 {
|
||||
unsafe { transmute(input) }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn u128_to_uint8x16_t(input: u128) -> uint8x16_t {
|
||||
unsafe { transmute(input) }
|
||||
}
|
||||
|
||||
#[target_feature(enable = "aes,neon")]
|
||||
unsafe fn generate_round_keys(key: AesKey) -> [uint8x16_t; NUM_ROUND_KEYS] {
|
||||
let mut round_keys: [uint8x16_t; NUM_ROUND_KEYS] = std::mem::zeroed();
|
||||
round_keys[0] = u128_to_uint8x16_t(key.0);
|
||||
|
||||
let words = std::slice::from_raw_parts_mut(
|
||||
round_keys.as_mut_ptr() as *mut u32,
|
||||
NUM_ROUND_KEYS * NUM_WORDS_IN_KEY,
|
||||
);
|
||||
|
||||
debug_assert_eq!(words.len(), 44);
|
||||
|
||||
// Skip the words of the first key, its already done
|
||||
for i in NUM_WORDS_IN_KEY..words.len() {
|
||||
if (i % NUM_WORDS_IN_KEY) == 0 {
|
||||
words[i] = words[i - NUM_WORDS_IN_KEY]
|
||||
^ sub_word(words[i - 1]).rotate_right(8)
|
||||
^ RCONS[(i / NUM_WORDS_IN_KEY) - 1];
|
||||
} else {
|
||||
words[i] = words[i - NUM_WORDS_IN_KEY] ^ words[i - 1];
|
||||
}
|
||||
// Note: there is also a special thing to do when
|
||||
// i mod SElf::NUM_WORDS_IN_KEY == 4 but it cannot happen on 128 bits keys
|
||||
}
|
||||
|
||||
round_keys
|
||||
}
|
||||
|
||||
/// Encrypts a 128-bit message
|
||||
///
|
||||
/// # SAFETY
|
||||
///
|
||||
/// You must make sure the CPU's arch is`aarch64` and has
|
||||
/// `neon` and `aes` features.
|
||||
#[inline(always)]
|
||||
unsafe fn encrypt(message: u128, keys: &[uint8x16_t; NUM_ROUND_KEYS]) -> u128 {
|
||||
// Notes:
|
||||
// According the [ARM Manual](https://developer.arm.com/documentation/ddi0487/gb/):
|
||||
// `vaeseq_u8` is the following AES operations:
|
||||
// 1. AddRoundKey (XOR)
|
||||
// 2. ShiftRows
|
||||
// 3. SubBytes
|
||||
// `vaesmcq_u8` is MixColumns
|
||||
let mut data: uint8x16_t = u128_to_uint8x16_t(message);
|
||||
|
||||
for &key in keys.iter().take(NUM_ROUNDS - 1) {
|
||||
data = vaesmcq_u8(vaeseq_u8(data, key));
|
||||
}
|
||||
|
||||
data = vaeseq_u8(data, keys[NUM_ROUNDS - 1]);
|
||||
data = veorq_u8(data, keys[NUM_ROUND_KEYS - 1]);
|
||||
|
||||
uint8x16_t_to_u128(data)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
// Test vector for aes128, from the FIPS publication 197
|
||||
const CIPHER_KEY: u128 = u128::from_be(0x000102030405060708090a0b0c0d0e0f);
|
||||
const KEY_SCHEDULE: [u128; 11] = [
|
||||
u128::from_be(0x000102030405060708090a0b0c0d0e0f),
|
||||
u128::from_be(0xd6aa74fdd2af72fadaa678f1d6ab76fe),
|
||||
u128::from_be(0xb692cf0b643dbdf1be9bc5006830b3fe),
|
||||
u128::from_be(0xb6ff744ed2c2c9bf6c590cbf0469bf41),
|
||||
u128::from_be(0x47f7f7bc95353e03f96c32bcfd058dfd),
|
||||
u128::from_be(0x3caaa3e8a99f9deb50f3af57adf622aa),
|
||||
u128::from_be(0x5e390f7df7a69296a7553dc10aa31f6b),
|
||||
u128::from_be(0x14f9701ae35fe28c440adf4d4ea9c026),
|
||||
u128::from_be(0x47438735a41c65b9e016baf4aebf7ad2),
|
||||
u128::from_be(0x549932d1f08557681093ed9cbe2c974e),
|
||||
u128::from_be(0x13111d7fe3944a17f307a78b4d2b30c5),
|
||||
];
|
||||
const PLAINTEXT: u128 = u128::from_be(0x00112233445566778899aabbccddeeff);
|
||||
const CIPHERTEXT: u128 = u128::from_be(0x69c4e0d86a7b0430d8cdb78070b4c55a);
|
||||
|
||||
#[test]
|
||||
fn test_generate_key_schedule() {
|
||||
// Checks that the round keys are correctly generated from the sample key from FIPS
|
||||
let key = AesKey(CIPHER_KEY);
|
||||
let keys = unsafe { generate_round_keys(key) };
|
||||
for (expected, actual) in KEY_SCHEDULE.iter().zip(keys.iter()) {
|
||||
assert_eq!(*expected, uint8x16_t_to_u128(*actual));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_encrypt_message() {
|
||||
// Checks that encrypting many plaintext at the same time gives the correct output.
|
||||
let message = PLAINTEXT;
|
||||
let key = AesKey(CIPHER_KEY);
|
||||
let keys = unsafe { generate_round_keys(key) };
|
||||
let ciphertext = unsafe { encrypt(message, &keys) };
|
||||
assert_eq!(CIPHERTEXT, ciphertext);
|
||||
}
|
||||
}
|
||||
110
concrete-csprng/src/generators/implem/aarch64/generator.rs
Normal file
110
concrete-csprng/src/generators/implem/aarch64/generator.rs
Normal file
@@ -0,0 +1,110 @@
|
||||
use crate::generators::aes_ctr::{AesCtrGenerator, AesKey, ChildrenIterator};
|
||||
use crate::generators::implem::aarch64::block_cipher::ArmAesBlockCipher;
|
||||
use crate::generators::{ByteCount, BytesPerChild, ChildrenCount, ForkError, RandomGenerator};
|
||||
use crate::seeders::Seed;
|
||||
|
||||
/// A random number generator using the `aesni` instructions.
|
||||
pub struct NeonAesRandomGenerator(pub(super) AesCtrGenerator<ArmAesBlockCipher>);
|
||||
|
||||
/// The children iterator used by [`NeonAesRandomGenerator`].
|
||||
///
|
||||
/// Outputs children generators one by one.
|
||||
pub struct ArmAesChildrenIterator(ChildrenIterator<ArmAesBlockCipher>);
|
||||
|
||||
impl Iterator for ArmAesChildrenIterator {
|
||||
type Item = NeonAesRandomGenerator;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().map(NeonAesRandomGenerator)
|
||||
}
|
||||
}
|
||||
|
||||
impl RandomGenerator for NeonAesRandomGenerator {
|
||||
type ChildrenIter = ArmAesChildrenIterator;
|
||||
fn new(seed: Seed) -> Self {
|
||||
NeonAesRandomGenerator(AesCtrGenerator::new(AesKey(seed.0), None, None))
|
||||
}
|
||||
fn remaining_bytes(&self) -> ByteCount {
|
||||
self.0.remaining_bytes()
|
||||
}
|
||||
fn try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ChildrenIter, ForkError> {
|
||||
self.0
|
||||
.try_fork(n_children, n_bytes)
|
||||
.map(ArmAesChildrenIterator)
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for NeonAesRandomGenerator {
|
||||
type Item = u8;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::generators::aes_ctr::aes_ctr_generic_test;
|
||||
use crate::generators::implem::aarch64::block_cipher::ArmAesBlockCipher;
|
||||
use crate::generators::{generator_generic_test, NeonAesRandomGenerator};
|
||||
|
||||
#[test]
|
||||
fn prop_fork_first_state_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_first_state_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_last_bound_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_last_bound_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_bound_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_parent_bound_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_state_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_parent_state_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork() {
|
||||
aes_ctr_generic_test::prop_fork::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_children_remaining_bytes() {
|
||||
aes_ctr_generic_test::prop_fork_children_remaining_bytes::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_remaining_bytes() {
|
||||
aes_ctr_generic_test::prop_fork_parent_remaining_bytes::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_roughly_uniform() {
|
||||
generator_generic_test::test_roughly_uniform::<NeonAesRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_generator_determinism() {
|
||||
generator_generic_test::test_generator_determinism::<NeonAesRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fork() {
|
||||
generator_generic_test::test_fork_children::<NeonAesRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "expected test panic")]
|
||||
fn test_bounded_panic() {
|
||||
generator_generic_test::test_bounded_none_should_panic::<NeonAesRandomGenerator>();
|
||||
}
|
||||
}
|
||||
16
concrete-csprng/src/generators/implem/aarch64/mod.rs
Normal file
16
concrete-csprng/src/generators/implem/aarch64/mod.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
//! A module implementing a random number generator, using the aarch64 `neon` and `aes`
|
||||
//! instructions.
|
||||
//!
|
||||
//! This module implements a cryptographically secure pseudorandom number generator
|
||||
//! (CS-PRNG), using a fast block cipher. The implementation is based on the
|
||||
//! [intel aesni white paper 323641-001 revision 3.0](https://www.intel.com/content/dam/doc/white-paper/advanced-encryption-standard-new-instructions-set-paper.pdf).
|
||||
|
||||
mod block_cipher;
|
||||
|
||||
mod generator;
|
||||
pub use generator::*;
|
||||
|
||||
#[cfg(feature = "parallel")]
|
||||
mod parallel;
|
||||
#[cfg(feature = "parallel")]
|
||||
pub use parallel::*;
|
||||
95
concrete-csprng/src/generators/implem/aarch64/parallel.rs
Normal file
95
concrete-csprng/src/generators/implem/aarch64/parallel.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::{AesCtrGenerator, ParallelChildrenIterator};
|
||||
use crate::generators::implem::aarch64::block_cipher::ArmAesBlockCipher;
|
||||
use crate::generators::{BytesPerChild, ChildrenCount, ForkError, ParallelRandomGenerator};
|
||||
use rayon::iter::plumbing::{Consumer, ProducerCallback, UnindexedConsumer};
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// The parallel children iterator used by [`NeonAesRandomGenerator`].
|
||||
///
|
||||
/// Outputs the children generators one by one.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub struct ParallelArmAesChildrenIterator(
|
||||
rayon::iter::Map<
|
||||
ParallelChildrenIterator<ArmAesBlockCipher>,
|
||||
fn(AesCtrGenerator<ArmAesBlockCipher>) -> NeonAesRandomGenerator,
|
||||
>,
|
||||
);
|
||||
|
||||
impl ParallelIterator for ParallelArmAesChildrenIterator {
|
||||
type Item = NeonAesRandomGenerator;
|
||||
fn drive_unindexed<C>(self, consumer: C) -> C::Result
|
||||
where
|
||||
C: UnindexedConsumer<Self::Item>,
|
||||
{
|
||||
self.0.drive_unindexed(consumer)
|
||||
}
|
||||
}
|
||||
|
||||
impl IndexedParallelIterator for ParallelArmAesChildrenIterator {
|
||||
fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
fn drive<C: Consumer<Self::Item>>(self, consumer: C) -> C::Result {
|
||||
self.0.drive(consumer)
|
||||
}
|
||||
fn with_producer<CB: ProducerCallback<Self::Item>>(self, callback: CB) -> CB::Output {
|
||||
self.0.with_producer(callback)
|
||||
}
|
||||
}
|
||||
|
||||
impl ParallelRandomGenerator for NeonAesRandomGenerator {
|
||||
type ParChildrenIter = ParallelArmAesChildrenIterator;
|
||||
|
||||
fn par_try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ParChildrenIter, ForkError> {
|
||||
self.0
|
||||
.par_try_fork(n_children, n_bytes)
|
||||
.map(|iterator| ParallelArmAesChildrenIterator(iterator.map(NeonAesRandomGenerator)))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
mod test {
|
||||
use crate::generators::aes_ctr::aes_ctr_parallel_generic_tests;
|
||||
use crate::generators::implem::aarch64::block_cipher::ArmAesBlockCipher;
|
||||
|
||||
#[test]
|
||||
fn prop_fork_first_state_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_first_state_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_last_bound_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_last_bound_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_bound_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_bound_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_state_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_state_table_index::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_ttt() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_children_remaining_bytes() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_children_remaining_bytes::<ArmAesBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_remaining_bytes() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_remaining_bytes::<ArmAesBlockCipher>();
|
||||
}
|
||||
}
|
||||
231
concrete-csprng/src/generators/implem/aesni/block_cipher.rs
Normal file
231
concrete-csprng/src/generators/implem/aesni/block_cipher.rs
Normal file
@@ -0,0 +1,231 @@
|
||||
use crate::generators::aes_ctr::{AesBlockCipher, AesIndex, AesKey, BYTES_PER_BATCH};
|
||||
use std::arch::x86_64::{
|
||||
__m128i, _mm_aesenc_si128, _mm_aesenclast_si128, _mm_aeskeygenassist_si128, _mm_shuffle_epi32,
|
||||
_mm_slli_si128, _mm_store_si128, _mm_xor_si128,
|
||||
};
|
||||
use std::mem::transmute;
|
||||
|
||||
/// An aes block cipher implementation which uses `aesni` instructions.
|
||||
#[derive(Clone)]
|
||||
pub struct AesniBlockCipher {
|
||||
// The set of round keys used for the aes encryption
|
||||
round_keys: [__m128i; 11],
|
||||
}
|
||||
|
||||
impl AesBlockCipher for AesniBlockCipher {
|
||||
fn new(key: AesKey) -> AesniBlockCipher {
|
||||
let aes_detected = is_x86_feature_detected!("aes");
|
||||
let sse2_detected = is_x86_feature_detected!("sse2");
|
||||
|
||||
if !(aes_detected && sse2_detected) {
|
||||
panic!(
|
||||
"The AesniBlockCipher requires both aes and sse2 x86 CPU features.\n\
|
||||
aes feature available: {}\nsse2 feature available: {}\n.",
|
||||
aes_detected, sse2_detected
|
||||
)
|
||||
}
|
||||
|
||||
// SAFETY: we checked for aes and sse2 availability
|
||||
let round_keys = unsafe { generate_round_keys(key) };
|
||||
AesniBlockCipher { round_keys }
|
||||
}
|
||||
|
||||
fn generate_batch(&mut self, AesIndex(aes_ctr): AesIndex) -> [u8; BYTES_PER_BATCH] {
|
||||
#[target_feature(enable = "sse2,aes")]
|
||||
unsafe fn implementation(
|
||||
this: &AesniBlockCipher,
|
||||
AesIndex(aes_ctr): AesIndex,
|
||||
) -> [u8; BYTES_PER_BATCH] {
|
||||
si128arr_to_u8arr(aes_encrypt_many(
|
||||
u128_to_si128(aes_ctr),
|
||||
u128_to_si128(aes_ctr + 1),
|
||||
u128_to_si128(aes_ctr + 2),
|
||||
u128_to_si128(aes_ctr + 3),
|
||||
u128_to_si128(aes_ctr + 4),
|
||||
u128_to_si128(aes_ctr + 5),
|
||||
u128_to_si128(aes_ctr + 6),
|
||||
u128_to_si128(aes_ctr + 7),
|
||||
&this.round_keys,
|
||||
))
|
||||
}
|
||||
// SAFETY: we checked for aes and sse2 availability in `Self::new`
|
||||
unsafe { implementation(self, AesIndex(aes_ctr)) }
|
||||
}
|
||||
}
|
||||
|
||||
#[target_feature(enable = "sse2,aes")]
|
||||
unsafe fn generate_round_keys(key: AesKey) -> [__m128i; 11] {
|
||||
let key = u128_to_si128(key.0);
|
||||
let mut keys: [__m128i; 11] = [u128_to_si128(0); 11];
|
||||
aes_128_key_expansion(key, &mut keys);
|
||||
keys
|
||||
}
|
||||
|
||||
// Uses aes to encrypt many values at once. This allows a substantial speedup (around 30%)
|
||||
// compared to the naive approach.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
#[inline(always)]
|
||||
fn aes_encrypt_many(
|
||||
message_1: __m128i,
|
||||
message_2: __m128i,
|
||||
message_3: __m128i,
|
||||
message_4: __m128i,
|
||||
message_5: __m128i,
|
||||
message_6: __m128i,
|
||||
message_7: __m128i,
|
||||
message_8: __m128i,
|
||||
keys: &[__m128i; 11],
|
||||
) -> [__m128i; 8] {
|
||||
unsafe {
|
||||
let mut tmp_1 = _mm_xor_si128(message_1, keys[0]);
|
||||
let mut tmp_2 = _mm_xor_si128(message_2, keys[0]);
|
||||
let mut tmp_3 = _mm_xor_si128(message_3, keys[0]);
|
||||
let mut tmp_4 = _mm_xor_si128(message_4, keys[0]);
|
||||
let mut tmp_5 = _mm_xor_si128(message_5, keys[0]);
|
||||
let mut tmp_6 = _mm_xor_si128(message_6, keys[0]);
|
||||
let mut tmp_7 = _mm_xor_si128(message_7, keys[0]);
|
||||
let mut tmp_8 = _mm_xor_si128(message_8, keys[0]);
|
||||
|
||||
for key in keys.iter().take(10).skip(1) {
|
||||
tmp_1 = _mm_aesenc_si128(tmp_1, *key);
|
||||
tmp_2 = _mm_aesenc_si128(tmp_2, *key);
|
||||
tmp_3 = _mm_aesenc_si128(tmp_3, *key);
|
||||
tmp_4 = _mm_aesenc_si128(tmp_4, *key);
|
||||
tmp_5 = _mm_aesenc_si128(tmp_5, *key);
|
||||
tmp_6 = _mm_aesenc_si128(tmp_6, *key);
|
||||
tmp_7 = _mm_aesenc_si128(tmp_7, *key);
|
||||
tmp_8 = _mm_aesenc_si128(tmp_8, *key);
|
||||
}
|
||||
|
||||
tmp_1 = _mm_aesenclast_si128(tmp_1, keys[10]);
|
||||
tmp_2 = _mm_aesenclast_si128(tmp_2, keys[10]);
|
||||
tmp_3 = _mm_aesenclast_si128(tmp_3, keys[10]);
|
||||
tmp_4 = _mm_aesenclast_si128(tmp_4, keys[10]);
|
||||
tmp_5 = _mm_aesenclast_si128(tmp_5, keys[10]);
|
||||
tmp_6 = _mm_aesenclast_si128(tmp_6, keys[10]);
|
||||
tmp_7 = _mm_aesenclast_si128(tmp_7, keys[10]);
|
||||
tmp_8 = _mm_aesenclast_si128(tmp_8, keys[10]);
|
||||
|
||||
[tmp_1, tmp_2, tmp_3, tmp_4, tmp_5, tmp_6, tmp_7, tmp_8]
|
||||
}
|
||||
}
|
||||
|
||||
fn aes_128_assist(temp1: __m128i, temp2: __m128i) -> __m128i {
|
||||
let mut temp3: __m128i;
|
||||
let mut temp2 = temp2;
|
||||
let mut temp1 = temp1;
|
||||
unsafe {
|
||||
temp2 = _mm_shuffle_epi32(temp2, 0xff);
|
||||
temp3 = _mm_slli_si128(temp1, 0x4);
|
||||
temp1 = _mm_xor_si128(temp1, temp3);
|
||||
temp3 = _mm_slli_si128(temp3, 0x4);
|
||||
temp1 = _mm_xor_si128(temp1, temp3);
|
||||
temp3 = _mm_slli_si128(temp3, 0x4);
|
||||
temp1 = _mm_xor_si128(temp1, temp3);
|
||||
temp1 = _mm_xor_si128(temp1, temp2);
|
||||
}
|
||||
temp1
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn aes_128_key_expansion(key: __m128i, keys: &mut [__m128i; 11]) {
|
||||
let (mut temp1, mut temp2): (__m128i, __m128i);
|
||||
temp1 = key;
|
||||
unsafe {
|
||||
_mm_store_si128(keys.as_mut_ptr(), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x01);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(1), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x02);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(2), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x04);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(3), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x08);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(4), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x10);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(5), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x20);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(6), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x40);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(7), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x80);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(8), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x1b);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(9), temp1);
|
||||
temp2 = _mm_aeskeygenassist_si128(temp1, 0x36);
|
||||
temp1 = aes_128_assist(temp1, temp2);
|
||||
_mm_store_si128(keys.as_mut_ptr().offset(10), temp1);
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn u128_to_si128(input: u128) -> __m128i {
|
||||
unsafe { transmute(input) }
|
||||
}
|
||||
|
||||
#[allow(unused)] // to please clippy when tests are not activated
|
||||
fn si128_to_u128(input: __m128i) -> u128 {
|
||||
unsafe { transmute(input) }
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn si128arr_to_u8arr(input: [__m128i; 8]) -> [u8; BYTES_PER_BATCH] {
|
||||
unsafe { transmute(input) }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
// Test vector for aes128, from the FIPS publication 197
|
||||
const CIPHER_KEY: u128 = u128::from_be(0x000102030405060708090a0b0c0d0e0f);
|
||||
const KEY_SCHEDULE: [u128; 11] = [
|
||||
u128::from_be(0x000102030405060708090a0b0c0d0e0f),
|
||||
u128::from_be(0xd6aa74fdd2af72fadaa678f1d6ab76fe),
|
||||
u128::from_be(0xb692cf0b643dbdf1be9bc5006830b3fe),
|
||||
u128::from_be(0xb6ff744ed2c2c9bf6c590cbf0469bf41),
|
||||
u128::from_be(0x47f7f7bc95353e03f96c32bcfd058dfd),
|
||||
u128::from_be(0x3caaa3e8a99f9deb50f3af57adf622aa),
|
||||
u128::from_be(0x5e390f7df7a69296a7553dc10aa31f6b),
|
||||
u128::from_be(0x14f9701ae35fe28c440adf4d4ea9c026),
|
||||
u128::from_be(0x47438735a41c65b9e016baf4aebf7ad2),
|
||||
u128::from_be(0x549932d1f08557681093ed9cbe2c974e),
|
||||
u128::from_be(0x13111d7fe3944a17f307a78b4d2b30c5),
|
||||
];
|
||||
const PLAINTEXT: u128 = u128::from_be(0x00112233445566778899aabbccddeeff);
|
||||
const CIPHERTEXT: u128 = u128::from_be(0x69c4e0d86a7b0430d8cdb78070b4c55a);
|
||||
|
||||
#[test]
|
||||
fn test_generate_key_schedule() {
|
||||
// Checks that the round keys are correctly generated from the sample key from FIPS
|
||||
let key = u128_to_si128(CIPHER_KEY);
|
||||
let mut keys: [__m128i; 11] = [u128_to_si128(0); 11];
|
||||
aes_128_key_expansion(key, &mut keys);
|
||||
for (expected, actual) in KEY_SCHEDULE.iter().zip(keys.iter()) {
|
||||
assert_eq!(*expected, si128_to_u128(*actual));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_encrypt_many_messages() {
|
||||
// Checks that encrypting many plaintext at the same time gives the correct output.
|
||||
let message = u128_to_si128(PLAINTEXT);
|
||||
let key = u128_to_si128(CIPHER_KEY);
|
||||
let mut keys: [__m128i; 11] = [u128_to_si128(0); 11];
|
||||
aes_128_key_expansion(key, &mut keys);
|
||||
let ciphertexts = aes_encrypt_many(
|
||||
message, message, message, message, message, message, message, message, &keys,
|
||||
);
|
||||
for ct in &ciphertexts {
|
||||
assert_eq!(CIPHERTEXT, si128_to_u128(*ct));
|
||||
}
|
||||
}
|
||||
}
|
||||
110
concrete-csprng/src/generators/implem/aesni/generator.rs
Normal file
110
concrete-csprng/src/generators/implem/aesni/generator.rs
Normal file
@@ -0,0 +1,110 @@
|
||||
use crate::generators::aes_ctr::{AesCtrGenerator, AesKey, ChildrenIterator};
|
||||
use crate::generators::implem::aesni::block_cipher::AesniBlockCipher;
|
||||
use crate::generators::{ByteCount, BytesPerChild, ChildrenCount, ForkError, RandomGenerator};
|
||||
use crate::seeders::Seed;
|
||||
|
||||
/// A random number generator using the `aesni` instructions.
|
||||
pub struct AesniRandomGenerator(pub(super) AesCtrGenerator<AesniBlockCipher>);
|
||||
|
||||
/// The children iterator used by [`AesniRandomGenerator`].
|
||||
///
|
||||
/// Outputs children generators one by one.
|
||||
pub struct AesniChildrenIterator(ChildrenIterator<AesniBlockCipher>);
|
||||
|
||||
impl Iterator for AesniChildrenIterator {
|
||||
type Item = AesniRandomGenerator;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().map(AesniRandomGenerator)
|
||||
}
|
||||
}
|
||||
|
||||
impl RandomGenerator for AesniRandomGenerator {
|
||||
type ChildrenIter = AesniChildrenIterator;
|
||||
fn new(seed: Seed) -> Self {
|
||||
AesniRandomGenerator(AesCtrGenerator::new(AesKey(seed.0), None, None))
|
||||
}
|
||||
fn remaining_bytes(&self) -> ByteCount {
|
||||
self.0.remaining_bytes()
|
||||
}
|
||||
fn try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ChildrenIter, ForkError> {
|
||||
self.0
|
||||
.try_fork(n_children, n_bytes)
|
||||
.map(AesniChildrenIterator)
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for AesniRandomGenerator {
|
||||
type Item = u8;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use crate::generators::aes_ctr::aes_ctr_generic_test;
|
||||
use crate::generators::implem::aesni::block_cipher::AesniBlockCipher;
|
||||
use crate::generators::{generator_generic_test, AesniRandomGenerator};
|
||||
|
||||
#[test]
|
||||
fn prop_fork_first_state_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_first_state_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_last_bound_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_last_bound_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_bound_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_parent_bound_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_state_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_parent_state_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork() {
|
||||
aes_ctr_generic_test::prop_fork::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_children_remaining_bytes() {
|
||||
aes_ctr_generic_test::prop_fork_children_remaining_bytes::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_remaining_bytes() {
|
||||
aes_ctr_generic_test::prop_fork_parent_remaining_bytes::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_roughly_uniform() {
|
||||
generator_generic_test::test_roughly_uniform::<AesniRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_generator_determinism() {
|
||||
generator_generic_test::test_generator_determinism::<AesniRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fork() {
|
||||
generator_generic_test::test_fork_children::<AesniRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "expected test panic")]
|
||||
fn test_bounded_panic() {
|
||||
generator_generic_test::test_bounded_none_should_panic::<AesniRandomGenerator>();
|
||||
}
|
||||
}
|
||||
15
concrete-csprng/src/generators/implem/aesni/mod.rs
Normal file
15
concrete-csprng/src/generators/implem/aesni/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
//! A module implementing a random number generator, using the x86_64 `aesni` instructions.
|
||||
//!
|
||||
//! This module implements a cryptographically secure pseudorandom number generator
|
||||
//! (CS-PRNG), using a fast block cipher. The implementation is based on the
|
||||
//! [intel aesni white paper 323641-001 revision 3.0](https://www.intel.com/content/dam/doc/white-paper/advanced-encryption-standard-new-instructions-set-paper.pdf).
|
||||
|
||||
mod block_cipher;
|
||||
|
||||
mod generator;
|
||||
pub use generator::*;
|
||||
|
||||
#[cfg(feature = "parallel")]
|
||||
mod parallel;
|
||||
#[cfg(feature = "parallel")]
|
||||
pub use parallel::*;
|
||||
95
concrete-csprng/src/generators/implem/aesni/parallel.rs
Normal file
95
concrete-csprng/src/generators/implem/aesni/parallel.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::{AesCtrGenerator, ParallelChildrenIterator};
|
||||
use crate::generators::implem::aesni::block_cipher::AesniBlockCipher;
|
||||
use crate::generators::{BytesPerChild, ChildrenCount, ForkError, ParallelRandomGenerator};
|
||||
use rayon::iter::plumbing::{Consumer, ProducerCallback, UnindexedConsumer};
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// The parallel children iterator used by [`AesniRandomGenerator`].
|
||||
///
|
||||
/// Outputs the children generators one by one.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub struct ParallelAesniChildrenIterator(
|
||||
rayon::iter::Map<
|
||||
ParallelChildrenIterator<AesniBlockCipher>,
|
||||
fn(AesCtrGenerator<AesniBlockCipher>) -> AesniRandomGenerator,
|
||||
>,
|
||||
);
|
||||
|
||||
impl ParallelIterator for ParallelAesniChildrenIterator {
|
||||
type Item = AesniRandomGenerator;
|
||||
fn drive_unindexed<C>(self, consumer: C) -> C::Result
|
||||
where
|
||||
C: UnindexedConsumer<Self::Item>,
|
||||
{
|
||||
self.0.drive_unindexed(consumer)
|
||||
}
|
||||
}
|
||||
|
||||
impl IndexedParallelIterator for ParallelAesniChildrenIterator {
|
||||
fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
fn drive<C: Consumer<Self::Item>>(self, consumer: C) -> C::Result {
|
||||
self.0.drive(consumer)
|
||||
}
|
||||
fn with_producer<CB: ProducerCallback<Self::Item>>(self, callback: CB) -> CB::Output {
|
||||
self.0.with_producer(callback)
|
||||
}
|
||||
}
|
||||
|
||||
impl ParallelRandomGenerator for AesniRandomGenerator {
|
||||
type ParChildrenIter = ParallelAesniChildrenIterator;
|
||||
|
||||
fn par_try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ParChildrenIter, ForkError> {
|
||||
self.0
|
||||
.par_try_fork(n_children, n_bytes)
|
||||
.map(|iterator| ParallelAesniChildrenIterator(iterator.map(AesniRandomGenerator)))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
||||
mod test {
|
||||
use crate::generators::aes_ctr::aes_ctr_parallel_generic_tests;
|
||||
use crate::generators::implem::aesni::block_cipher::AesniBlockCipher;
|
||||
|
||||
#[test]
|
||||
fn prop_fork_first_state_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_first_state_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_last_bound_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_last_bound_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_bound_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_bound_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_state_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_state_table_index::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_ttt() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_children_remaining_bytes() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_children_remaining_bytes::<AesniBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_remaining_bytes() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_remaining_bytes::<AesniBlockCipher>();
|
||||
}
|
||||
}
|
||||
14
concrete-csprng/src/generators/implem/mod.rs
Normal file
14
concrete-csprng/src/generators/implem/mod.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
#[cfg(feature = "generator_x86_64_aesni")]
|
||||
mod aesni;
|
||||
#[cfg(feature = "generator_x86_64_aesni")]
|
||||
pub use aesni::*;
|
||||
|
||||
#[cfg(feature = "generator_aarch64_aes")]
|
||||
mod aarch64;
|
||||
#[cfg(feature = "generator_aarch64_aes")]
|
||||
pub use aarch64::*;
|
||||
|
||||
#[cfg(feature = "generator_fallback")]
|
||||
mod soft;
|
||||
#[cfg(feature = "generator_fallback")]
|
||||
pub use soft::*;
|
||||
114
concrete-csprng/src/generators/implem/soft/block_cipher.rs
Normal file
114
concrete-csprng/src/generators/implem/soft/block_cipher.rs
Normal file
@@ -0,0 +1,114 @@
|
||||
use crate::generators::aes_ctr::{
|
||||
AesBlockCipher, AesIndex, AesKey, AES_CALLS_PER_BATCH, BYTES_PER_AES_CALL, BYTES_PER_BATCH,
|
||||
};
|
||||
use aes::cipher::generic_array::GenericArray;
|
||||
use aes::cipher::{BlockEncrypt, KeyInit};
|
||||
use aes::Aes128;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SoftwareBlockCipher {
|
||||
// Aes structure
|
||||
aes: Aes128,
|
||||
}
|
||||
|
||||
impl AesBlockCipher for SoftwareBlockCipher {
|
||||
fn new(key: AesKey) -> SoftwareBlockCipher {
|
||||
let key: [u8; BYTES_PER_AES_CALL] = key.0.to_ne_bytes();
|
||||
let key = GenericArray::clone_from_slice(&key[..]);
|
||||
let aes = Aes128::new(&key);
|
||||
SoftwareBlockCipher { aes }
|
||||
}
|
||||
|
||||
fn generate_batch(&mut self, AesIndex(aes_ctr): AesIndex) -> [u8; BYTES_PER_BATCH] {
|
||||
aes_encrypt_many(
|
||||
aes_ctr,
|
||||
aes_ctr + 1,
|
||||
aes_ctr + 2,
|
||||
aes_ctr + 3,
|
||||
aes_ctr + 4,
|
||||
aes_ctr + 5,
|
||||
aes_ctr + 6,
|
||||
aes_ctr + 7,
|
||||
&self.aes,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Uses aes to encrypt many values at once. This allows a substantial speedup (around 30%)
|
||||
// compared to the naive approach.
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn aes_encrypt_many(
|
||||
message_1: u128,
|
||||
message_2: u128,
|
||||
message_3: u128,
|
||||
message_4: u128,
|
||||
message_5: u128,
|
||||
message_6: u128,
|
||||
message_7: u128,
|
||||
message_8: u128,
|
||||
cipher: &Aes128,
|
||||
) -> [u8; BYTES_PER_BATCH] {
|
||||
let mut b1 = GenericArray::clone_from_slice(&message_1.to_ne_bytes()[..]);
|
||||
let mut b2 = GenericArray::clone_from_slice(&message_2.to_ne_bytes()[..]);
|
||||
let mut b3 = GenericArray::clone_from_slice(&message_3.to_ne_bytes()[..]);
|
||||
let mut b4 = GenericArray::clone_from_slice(&message_4.to_ne_bytes()[..]);
|
||||
let mut b5 = GenericArray::clone_from_slice(&message_5.to_ne_bytes()[..]);
|
||||
let mut b6 = GenericArray::clone_from_slice(&message_6.to_ne_bytes()[..]);
|
||||
let mut b7 = GenericArray::clone_from_slice(&message_7.to_ne_bytes()[..]);
|
||||
let mut b8 = GenericArray::clone_from_slice(&message_8.to_ne_bytes()[..]);
|
||||
|
||||
cipher.encrypt_block(&mut b1);
|
||||
cipher.encrypt_block(&mut b2);
|
||||
cipher.encrypt_block(&mut b3);
|
||||
cipher.encrypt_block(&mut b4);
|
||||
cipher.encrypt_block(&mut b5);
|
||||
cipher.encrypt_block(&mut b6);
|
||||
cipher.encrypt_block(&mut b7);
|
||||
cipher.encrypt_block(&mut b8);
|
||||
|
||||
let output_array: [[u8; BYTES_PER_AES_CALL]; AES_CALLS_PER_BATCH] = [
|
||||
b1.into(),
|
||||
b2.into(),
|
||||
b3.into(),
|
||||
b4.into(),
|
||||
b5.into(),
|
||||
b6.into(),
|
||||
b7.into(),
|
||||
b8.into(),
|
||||
];
|
||||
|
||||
unsafe { *{ output_array.as_ptr() as *const [u8; BYTES_PER_BATCH] } }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use std::convert::TryInto;
|
||||
|
||||
// Test vector for aes128, from the FIPS publication 197
|
||||
const CIPHER_KEY: u128 = u128::from_be(0x000102030405060708090a0b0c0d0e0f);
|
||||
const PLAINTEXT: u128 = u128::from_be(0x00112233445566778899aabbccddeeff);
|
||||
const CIPHERTEXT: u128 = u128::from_be(0x69c4e0d86a7b0430d8cdb78070b4c55a);
|
||||
|
||||
#[test]
|
||||
fn test_encrypt_many_messages() {
|
||||
// Checks that encrypting many plaintext at the same time gives the correct output.
|
||||
let key: [u8; BYTES_PER_AES_CALL] = CIPHER_KEY.to_ne_bytes();
|
||||
let aes = Aes128::new(&GenericArray::from(key));
|
||||
let ciphertexts = aes_encrypt_many(
|
||||
PLAINTEXT, PLAINTEXT, PLAINTEXT, PLAINTEXT, PLAINTEXT, PLAINTEXT, PLAINTEXT, PLAINTEXT,
|
||||
&aes,
|
||||
);
|
||||
let ciphertexts: [u8; BYTES_PER_BATCH] = ciphertexts[..].try_into().unwrap();
|
||||
for i in 0..8 {
|
||||
assert_eq!(
|
||||
u128::from_ne_bytes(
|
||||
ciphertexts[BYTES_PER_AES_CALL * i..BYTES_PER_AES_CALL * (i + 1)]
|
||||
.try_into()
|
||||
.unwrap()
|
||||
),
|
||||
CIPHERTEXT
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
110
concrete-csprng/src/generators/implem/soft/generator.rs
Normal file
110
concrete-csprng/src/generators/implem/soft/generator.rs
Normal file
@@ -0,0 +1,110 @@
|
||||
use crate::generators::aes_ctr::{AesCtrGenerator, AesKey, ChildrenIterator};
|
||||
use crate::generators::implem::soft::block_cipher::SoftwareBlockCipher;
|
||||
use crate::generators::{ByteCount, BytesPerChild, ChildrenCount, ForkError, RandomGenerator};
|
||||
use crate::seeders::Seed;
|
||||
|
||||
/// A random number generator using a software implementation.
|
||||
pub struct SoftwareRandomGenerator(pub(super) AesCtrGenerator<SoftwareBlockCipher>);
|
||||
|
||||
/// The children iterator used by [`SoftwareRandomGenerator`].
|
||||
///
|
||||
/// Outputs children generators one by one.
|
||||
pub struct SoftwareChildrenIterator(ChildrenIterator<SoftwareBlockCipher>);
|
||||
|
||||
impl Iterator for SoftwareChildrenIterator {
|
||||
type Item = SoftwareRandomGenerator;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().map(SoftwareRandomGenerator)
|
||||
}
|
||||
}
|
||||
|
||||
impl RandomGenerator for SoftwareRandomGenerator {
|
||||
type ChildrenIter = SoftwareChildrenIterator;
|
||||
fn new(seed: Seed) -> Self {
|
||||
SoftwareRandomGenerator(AesCtrGenerator::new(AesKey(seed.0), None, None))
|
||||
}
|
||||
fn remaining_bytes(&self) -> ByteCount {
|
||||
self.0.remaining_bytes()
|
||||
}
|
||||
fn try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ChildrenIter, ForkError> {
|
||||
self.0
|
||||
.try_fork(n_children, n_bytes)
|
||||
.map(SoftwareChildrenIterator)
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for SoftwareRandomGenerator {
|
||||
type Item = u8;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::aes_ctr_generic_test;
|
||||
use crate::generators::generator_generic_test;
|
||||
|
||||
#[test]
|
||||
fn prop_fork_first_state_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_first_state_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_last_bound_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_last_bound_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_bound_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_parent_bound_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_state_table_index() {
|
||||
aes_ctr_generic_test::prop_fork_parent_state_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork() {
|
||||
aes_ctr_generic_test::prop_fork::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_children_remaining_bytes() {
|
||||
aes_ctr_generic_test::prop_fork_children_remaining_bytes::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_remaining_bytes() {
|
||||
aes_ctr_generic_test::prop_fork_parent_remaining_bytes::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_roughly_uniform() {
|
||||
generator_generic_test::test_roughly_uniform::<SoftwareRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_fork() {
|
||||
generator_generic_test::test_fork_children::<SoftwareRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_generator_determinism() {
|
||||
generator_generic_test::test_generator_determinism::<SoftwareRandomGenerator>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "expected test panic")]
|
||||
fn test_bounded_panic() {
|
||||
generator_generic_test::test_bounded_none_should_panic::<SoftwareRandomGenerator>();
|
||||
}
|
||||
}
|
||||
11
concrete-csprng/src/generators/implem/soft/mod.rs
Normal file
11
concrete-csprng/src/generators/implem/soft/mod.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
//! A module using a software fallback implementation of random number generator.
|
||||
|
||||
mod block_cipher;
|
||||
|
||||
mod generator;
|
||||
pub use generator::*;
|
||||
|
||||
#[cfg(feature = "parallel")]
|
||||
mod parallel;
|
||||
#[cfg(feature = "parallel")]
|
||||
pub use parallel::*;
|
||||
94
concrete-csprng/src/generators/implem/soft/parallel.rs
Normal file
94
concrete-csprng/src/generators/implem/soft/parallel.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::{AesCtrGenerator, ParallelChildrenIterator};
|
||||
use crate::generators::implem::soft::block_cipher::SoftwareBlockCipher;
|
||||
use crate::generators::{BytesPerChild, ChildrenCount, ForkError, ParallelRandomGenerator};
|
||||
use rayon::iter::plumbing::{Consumer, ProducerCallback, UnindexedConsumer};
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// The parallel children iterator used by [`SoftwareRandomGenerator`].
|
||||
///
|
||||
/// Outputs the children generators one by one.
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub struct ParallelSoftwareChildrenIterator(
|
||||
rayon::iter::Map<
|
||||
ParallelChildrenIterator<SoftwareBlockCipher>,
|
||||
fn(AesCtrGenerator<SoftwareBlockCipher>) -> SoftwareRandomGenerator,
|
||||
>,
|
||||
);
|
||||
|
||||
impl ParallelIterator for ParallelSoftwareChildrenIterator {
|
||||
type Item = SoftwareRandomGenerator;
|
||||
fn drive_unindexed<C>(self, consumer: C) -> C::Result
|
||||
where
|
||||
C: UnindexedConsumer<Self::Item>,
|
||||
{
|
||||
self.0.drive_unindexed(consumer)
|
||||
}
|
||||
}
|
||||
|
||||
impl IndexedParallelIterator for ParallelSoftwareChildrenIterator {
|
||||
fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
fn drive<C: Consumer<Self::Item>>(self, consumer: C) -> C::Result {
|
||||
self.0.drive(consumer)
|
||||
}
|
||||
fn with_producer<CB: ProducerCallback<Self::Item>>(self, callback: CB) -> CB::Output {
|
||||
self.0.with_producer(callback)
|
||||
}
|
||||
}
|
||||
|
||||
impl ParallelRandomGenerator for SoftwareRandomGenerator {
|
||||
type ParChildrenIter = ParallelSoftwareChildrenIterator;
|
||||
|
||||
fn par_try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ParChildrenIter, ForkError> {
|
||||
self.0
|
||||
.par_try_fork(n_children, n_bytes)
|
||||
.map(|iterator| ParallelSoftwareChildrenIterator(iterator.map(SoftwareRandomGenerator)))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::generators::aes_ctr::aes_ctr_parallel_generic_tests;
|
||||
|
||||
#[test]
|
||||
fn prop_fork_first_state_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_first_state_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_last_bound_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_last_bound_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_bound_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_bound_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_state_table_index() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_state_table_index::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_children_remaining_bytes() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_children_remaining_bytes::<SoftwareBlockCipher>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn prop_fork_parent_remaining_bytes() {
|
||||
aes_ctr_parallel_generic_tests::prop_fork_parent_remaining_bytes::<SoftwareBlockCipher>();
|
||||
}
|
||||
}
|
||||
235
concrete-csprng/src/generators/mod.rs
Normal file
235
concrete-csprng/src/generators/mod.rs
Normal file
@@ -0,0 +1,235 @@
|
||||
//! A module containing random generators objects.
|
||||
//!
|
||||
//! See [crate-level](`crate`) explanations.
|
||||
use crate::seeders::Seed;
|
||||
use std::error::Error;
|
||||
use std::fmt::{Display, Formatter};
|
||||
|
||||
/// The number of children created when a generator is forked.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ChildrenCount(pub usize);
|
||||
|
||||
/// The number of bytes each child can generate, when a generator is forked.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BytesPerChild(pub usize);
|
||||
|
||||
/// A structure representing the number of bytes between two table indices.
|
||||
#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]
|
||||
pub struct ByteCount(pub u128);
|
||||
|
||||
/// An error occurring during a generator fork.
|
||||
#[derive(Debug)]
|
||||
pub enum ForkError {
|
||||
ForkTooLarge,
|
||||
ZeroChildrenCount,
|
||||
ZeroBytesPerChild,
|
||||
}
|
||||
|
||||
impl Display for ForkError {
|
||||
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ForkError::ForkTooLarge => {
|
||||
write!(
|
||||
f,
|
||||
"The children generators would output bytes after the parent bound. "
|
||||
)
|
||||
}
|
||||
ForkError::ZeroChildrenCount => {
|
||||
write!(
|
||||
f,
|
||||
"The number of children in the fork must be greater than zero."
|
||||
)
|
||||
}
|
||||
ForkError::ZeroBytesPerChild => {
|
||||
write!(
|
||||
f,
|
||||
"The number of bytes per child must be greater than zero."
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
impl Error for ForkError {}
|
||||
|
||||
/// A trait for cryptographically secure pseudo-random generators.
|
||||
///
|
||||
/// See the [crate-level](#crate) documentation for details.
|
||||
pub trait RandomGenerator: Iterator<Item = u8> {
|
||||
/// The iterator over children generators, returned by `try_fork` in case of success.
|
||||
type ChildrenIter: Iterator<Item = Self>;
|
||||
|
||||
/// Creates a new generator from a seed.
|
||||
///
|
||||
/// This operation is usually costly to perform, as the aes round keys need to be generated from
|
||||
/// the seed.
|
||||
fn new(seed: Seed) -> Self;
|
||||
|
||||
/// Returns the number of bytes that can still be outputted by the generator before reaching its
|
||||
/// bound.
|
||||
///
|
||||
/// Note:
|
||||
/// -----
|
||||
///
|
||||
/// A fresh generator can generate 2¹³² bytes. Unfortunately, no rust integer type in is able
|
||||
/// to encode such a large number. Consequently [`ByteCount`] uses the largest integer type
|
||||
/// available to encode this value: the `u128` type. For this reason, this method does not
|
||||
/// effectively return the number of remaining bytes, but instead
|
||||
/// `min(2¹²⁸-1, remaining_bytes)`.
|
||||
fn remaining_bytes(&self) -> ByteCount;
|
||||
|
||||
/// Returns the next byte of the stream, if the generator did not yet reach its bound.
|
||||
fn next_byte(&mut self) -> Option<u8> {
|
||||
self.next()
|
||||
}
|
||||
|
||||
/// Tries to fork the generator into an iterator of `n_children` new generators, each able to
|
||||
/// output `n_bytes` bytes.
|
||||
///
|
||||
/// Note:
|
||||
/// -----
|
||||
///
|
||||
/// To be successful, the number of remaining bytes for the parent generator must be larger than
|
||||
/// `n_children*n_bytes`.
|
||||
fn try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ChildrenIter, ForkError>;
|
||||
}
|
||||
|
||||
/// A trait extending [`RandomGenerator`] to the parallel iterators of `rayon`.
|
||||
#[cfg(feature = "parallel")]
|
||||
pub trait ParallelRandomGenerator: RandomGenerator + Send {
|
||||
/// The iterator over children generators, returned by `par_try_fork` in case of success.
|
||||
type ParChildrenIter: rayon::prelude::IndexedParallelIterator<Item = Self>;
|
||||
|
||||
/// Tries to fork the generator into a parallel iterator of `n_children` new generators, each
|
||||
/// able to output `n_bytes` bytes.
|
||||
///
|
||||
/// Note:
|
||||
/// -----
|
||||
///
|
||||
/// To be successful, the number of remaining bytes for the parent generator must be larger than
|
||||
/// `n_children*n_bytes`.
|
||||
fn par_try_fork(
|
||||
&mut self,
|
||||
n_children: ChildrenCount,
|
||||
n_bytes: BytesPerChild,
|
||||
) -> Result<Self::ParChildrenIter, ForkError>;
|
||||
}
|
||||
|
||||
mod aes_ctr;
|
||||
|
||||
mod implem;
|
||||
pub use implem::*;
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod generator_generic_test {
|
||||
#![allow(unused)] // to please clippy when tests are not activated
|
||||
use super::*;
|
||||
use rand::Rng;
|
||||
|
||||
const REPEATS: usize = 1_000;
|
||||
|
||||
fn any_seed() -> impl Iterator<Item = Seed> {
|
||||
std::iter::repeat_with(|| Seed(rand::thread_rng().gen()))
|
||||
}
|
||||
|
||||
fn some_children_count() -> impl Iterator<Item = ChildrenCount> {
|
||||
std::iter::repeat_with(|| ChildrenCount(rand::thread_rng().gen::<usize>() % 16 + 1))
|
||||
}
|
||||
|
||||
fn some_bytes_per_child() -> impl Iterator<Item = BytesPerChild> {
|
||||
std::iter::repeat_with(|| BytesPerChild(rand::thread_rng().gen::<usize>() % 128 + 1))
|
||||
}
|
||||
|
||||
/// Checks that the PRNG roughly generates uniform numbers.
|
||||
///
|
||||
/// To do that, we perform an histogram of the occurrences of each byte value, over a fixed
|
||||
/// number of samples and check that the empirical probabilities of the bins are close to
|
||||
/// the theoretical probabilities.
|
||||
pub fn test_roughly_uniform<G: RandomGenerator>() {
|
||||
// Number of bins to use for the histogram.
|
||||
const N_BINS: usize = u8::MAX as usize + 1;
|
||||
// Number of samples to use for the histogram.
|
||||
let n_samples = 10_000_000_usize;
|
||||
// Theoretical probability of a each bins.
|
||||
let expected_prob: f64 = 1. / N_BINS as f64;
|
||||
// Absolute error allowed on the empirical probabilities.
|
||||
// This value was tuned to make the test pass on an arguably correct state of
|
||||
// implementation. 10^-4 precision is arguably pretty fine for this rough test, but it would
|
||||
// be interesting to improve this test.
|
||||
let precision = 10f64.powi(-3);
|
||||
|
||||
for _ in 0..REPEATS {
|
||||
// We instantiate a new generator.
|
||||
let seed = any_seed().next().unwrap();
|
||||
let mut generator = G::new(seed);
|
||||
// We create a new histogram
|
||||
let mut counts = [0usize; N_BINS];
|
||||
// We fill the histogram.
|
||||
for _ in 0..n_samples {
|
||||
counts[generator.next_byte().unwrap() as usize] += 1;
|
||||
}
|
||||
// We check that the empirical probabilities are close enough to the theoretical one.
|
||||
counts
|
||||
.iter()
|
||||
.map(|a| (*a as f64) / (n_samples as f64))
|
||||
.for_each(|a| assert!((a - expected_prob).abs() < precision))
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks that given a state and a key, the PRNG is determinist.
|
||||
pub fn test_generator_determinism<G: RandomGenerator>() {
|
||||
for _ in 0..REPEATS {
|
||||
let seed = any_seed().next().unwrap();
|
||||
let mut first_generator = G::new(seed);
|
||||
let mut second_generator = G::new(seed);
|
||||
for _ in 0..1024 {
|
||||
assert_eq!(first_generator.next(), second_generator.next());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks that forks returns a bounded child, and that the proper number of bytes can be
|
||||
/// generated.
|
||||
pub fn test_fork_children<G: RandomGenerator>() {
|
||||
for _ in 0..REPEATS {
|
||||
let ((seed, n_children), n_bytes) = any_seed()
|
||||
.zip(some_children_count())
|
||||
.zip(some_bytes_per_child())
|
||||
.next()
|
||||
.unwrap();
|
||||
let mut gen = G::new(seed);
|
||||
let mut bounded = gen.try_fork(n_children, n_bytes).unwrap().next().unwrap();
|
||||
assert_eq!(bounded.remaining_bytes(), ByteCount(n_bytes.0 as u128));
|
||||
for _ in 0..n_bytes.0 {
|
||||
bounded.next().unwrap();
|
||||
}
|
||||
|
||||
// Assert we are at the bound
|
||||
assert!(bounded.next().is_none());
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks that a bounded prng returns none when exceeding the allowed number of bytes.
|
||||
///
|
||||
/// To properly check for panic use `#[should_panic(expected = "expected test panic")]` as an
|
||||
/// attribute on the test function.
|
||||
pub fn test_bounded_none_should_panic<G: RandomGenerator>() {
|
||||
let ((seed, n_children), n_bytes) = any_seed()
|
||||
.zip(some_children_count())
|
||||
.zip(some_bytes_per_child())
|
||||
.next()
|
||||
.unwrap();
|
||||
let mut gen = G::new(seed);
|
||||
let mut bounded = gen.try_fork(n_children, n_bytes).unwrap().next().unwrap();
|
||||
assert_eq!(bounded.remaining_bytes(), ByteCount(n_bytes.0 as u128));
|
||||
for _ in 0..n_bytes.0 {
|
||||
assert!(bounded.next().is_some());
|
||||
}
|
||||
|
||||
// One call too many, should panic
|
||||
bounded.next().ok_or("expected test panic").unwrap();
|
||||
}
|
||||
}
|
||||
114
concrete-csprng/src/lib.rs
Normal file
114
concrete-csprng/src/lib.rs
Normal file
@@ -0,0 +1,114 @@
|
||||
#![deny(rustdoc::broken_intra_doc_links)]
|
||||
//! Cryptographically secure pseudo random number generator.
|
||||
//!
|
||||
//! Welcome to the `concrete-csprng` documentation.
|
||||
//!
|
||||
//! This crate provides a fast cryptographically secure pseudo-random number generator, suited to
|
||||
//! work in a multithreaded setting.
|
||||
//!
|
||||
//! Random Generators
|
||||
//! =================
|
||||
//!
|
||||
//! The central abstraction of this crate is the [`RandomGenerator`](generators::RandomGenerator)
|
||||
//! trait, which is implemented by different types, each supporting a different platform. In
|
||||
//! essence, a type implementing [`RandomGenerator`](generators::RandomGenerator) is a type that
|
||||
//! outputs a new pseudo-random byte at each call to
|
||||
//! [`next_byte`](generators::RandomGenerator::next_byte). Such a generator `g` can be seen as
|
||||
//! enclosing a growing index into an imaginary array of pseudo-random bytes:
|
||||
//! ```ascii
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M-1 │
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │
|
||||
//! ┃ │ │ │ │ │ │ │ │ │ │...│ ┃ │
|
||||
//! ┗↥┷━┷━┷━┷━┷━┷━┷━┷━┷━┷━━━┷━┛ │
|
||||
//! g │
|
||||
//! │
|
||||
//! g.next_byte() │
|
||||
//! │
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M-1 │
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │
|
||||
//! ┃╳│ │ │ │ │ │ │ │ │ │...│ ┃ │
|
||||
//! ┗━┷↥┷━┷━┷━┷━┷━┷━┷━┷━┷━━━┷━┛ │
|
||||
//! g │
|
||||
//! │
|
||||
//! g.next_byte() │ legend:
|
||||
//! │ -------
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M-1 │ ↥ : next byte to be outputted by g
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │ │ │: byte not yet outputted by g
|
||||
//! ┃╳│╳│ │ │ │ │ │ │ │ │...│ ┃ │ │╳│: byte already outputted by g
|
||||
//! ┗━┷━┷↥┷━┷━┷━┷━┷━┷━┷━┷━━━┷━┛ │
|
||||
//! g 🭭
|
||||
//! ```
|
||||
//!
|
||||
//! While being large, this imaginary array is still bounded to M = 2¹³² bytes. Consequently, a
|
||||
//! generator is always bounded to a maximal index. That is, there is always a max amount of
|
||||
//! elements of this array that can be outputted by the generator. By default, generators created
|
||||
//! via [`new`](generators::RandomGenerator::new) are always bounded to M-1.
|
||||
//!
|
||||
//! Tree partition of the pseudo-random stream
|
||||
//! ==========================================
|
||||
//!
|
||||
//! One particularity of this implementation is that you can use the
|
||||
//! [`try_fork`](generators::RandomGenerator::try_fork) method to create an arbitrary partition tree
|
||||
//! of a region of this array. Indeed, calling `try_fork(nc, nb)` outputs `nc` new generators, each
|
||||
//! able to output `nb` bytes. The `try_fork` method ensures that the states and bounds of the
|
||||
//! parent and children generators are set so as to prevent the same substream to be outputted
|
||||
//! twice:
|
||||
//! ```ascii
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M │
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │
|
||||
//! ┃P│P│P│P│P│P│P│P│P│P│...│P┃ │
|
||||
//! ┗↥┷━┷━┷━┷━┷━┷━┷━┷━┷━┷━━━┷━┛ │
|
||||
//! p │
|
||||
//! │
|
||||
//! (a,b) = p.fork(2,4) │
|
||||
//! │
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M │
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │
|
||||
//! ┃A│A│A│A│B│B│B│B│P│P│...│P┃ │
|
||||
//! ┗↥┷━┷━┷━┷↥┷━┷━┷━┷↥┷━┷━━━┷━┛ │
|
||||
//! a b p │
|
||||
//! │ legend:
|
||||
//! (c,d) = b.fork(2, 1) │ -------
|
||||
//! │ ↥ : next byte to be outputted by p
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M │ │P│: byte to be outputted by p
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │ │╳│: byte already outputted
|
||||
//! ┃A│A│A│A│C│D│B│B│P│P│...│P┃ │
|
||||
//! ┗↥┷━┷━┷━┷↥┷↥┷↥┷━┷↥┷━┷━━━┷━┛ │
|
||||
//! a c d b p 🭭
|
||||
//! ```
|
||||
//!
|
||||
//! This makes it possible to consume the stream at different places. This is particularly useful in
|
||||
//! a multithreaded setting, in which we want to use the same generator from different independent
|
||||
//! threads:
|
||||
//!
|
||||
//! ```ascii
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M │
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │
|
||||
//! ┃A│A│A│A│C│D│B│B│P│P│...│P┃ │
|
||||
//! ┗↥┷━┷━┷━┷↥┷↥┷↥┷━┷↥┷━┷━━━┷━┛ │
|
||||
//! a c d b p │
|
||||
//! │
|
||||
//! a.next_byte() │
|
||||
//! │
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M │
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │
|
||||
//! ┃╳│A│A│A│C│D│B│B│P│P│...│P┃ │
|
||||
//! ┗━┷↥┷━┷━┷↥┷↥┷↥┷━┷↥┷━┷━━━┷━┛ │
|
||||
//! a c d b p │
|
||||
//! │ legend:
|
||||
//! b.next_byte() │ -------
|
||||
//! │ ↥ : next byte to be outputted by p
|
||||
//! 0 1 2 3 4 5 6 7 8 9 M │ │P│: byte to be outputted by p
|
||||
//! ┏━┯━┯━┯━┯━┯━┯━┯━┯━┯━┯━━━┯━┓ │ │╳│: byte already outputted
|
||||
//! ┃╳│A│A│A│C│D│╳│B│P│P│...│P┃ │
|
||||
//! ┗━┷↥┷━┷━┷↥┷↥┷━┷↥┷↥┷━┷━━━┷━┛ │
|
||||
//! a c d b p 🭭
|
||||
//! ```
|
||||
//!
|
||||
//! Implementation
|
||||
//! ==============
|
||||
//!
|
||||
//! The implementation is based on the AES blockcipher used in counter (CTR) mode, as presented
|
||||
//! in the ISO/IEC 18033-4 document.
|
||||
pub mod generators;
|
||||
pub mod seeders;
|
||||
@@ -0,0 +1,141 @@
|
||||
use crate::seeders::{Seed, Seeder};
|
||||
use libc;
|
||||
use std::cmp::Ordering;
|
||||
|
||||
/// There is no `rseed` equivalent in the ARM specification until `ARMv8.5-A`.
|
||||
/// However it seems that these instructions are not exposed in `core::arch::aarch64`.
|
||||
///
|
||||
/// Our primary interest for supporting aarch64 targets is AppleSilicon support
|
||||
/// which for the M1 macs available, they are based on the `ARMv8.4-A` set.
|
||||
///
|
||||
/// So we fall back to using a function from Apple's API which
|
||||
/// uses the [Secure Enclave] to generate cryptographically secure random bytes.
|
||||
///
|
||||
/// [Secure Enclave]: https://support.apple.com/fr-fr/guide/security/sec59b0b31ff/web
|
||||
mod secure_enclave {
|
||||
pub enum __SecRandom {}
|
||||
pub type SecRandomRef = *const __SecRandom;
|
||||
use libc::{c_int, c_void};
|
||||
|
||||
#[link(name = "Security", kind = "framework")]
|
||||
extern "C" {
|
||||
pub static kSecRandomDefault: SecRandomRef;
|
||||
|
||||
pub fn SecRandomCopyBytes(rnd: SecRandomRef, count: usize, bytes: *mut c_void) -> c_int;
|
||||
}
|
||||
|
||||
pub fn generate_random_bytes(bytes: &mut [u8]) -> std::io::Result<()> {
|
||||
// As per Apple's documentation:
|
||||
// - https://developer.apple.com/documentation/security/randomization_services?language=objc
|
||||
// - https://developer.apple.com/documentation/security/1399291-secrandomcopybytes?language=objc
|
||||
//
|
||||
// The `SecRandomCopyBytes` "Generate cryptographically secure random numbers"
|
||||
unsafe {
|
||||
let res = SecRandomCopyBytes(
|
||||
kSecRandomDefault,
|
||||
bytes.len(),
|
||||
bytes.as_mut_ptr() as *mut c_void,
|
||||
);
|
||||
if res != 0 {
|
||||
Err(std::io::Error::last_os_error())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A seeder which uses the `SecRandomCopyBytes` function from Apple's `Security` framework.
|
||||
///
|
||||
/// <https://developer.apple.com/documentation/security/1399291-secrandomcopybytes?language=objc>
|
||||
pub struct AppleSecureEnclaveSeeder;
|
||||
|
||||
impl Seeder for AppleSecureEnclaveSeeder {
|
||||
fn seed(&mut self) -> Seed {
|
||||
// 16 bytes == 128 bits
|
||||
let mut bytes = [0u8; 16];
|
||||
secure_enclave::generate_random_bytes(&mut bytes)
|
||||
.expect("Failure while using Apple secure enclave: {err:?}");
|
||||
|
||||
Seed(u128::from_le_bytes(bytes))
|
||||
}
|
||||
|
||||
fn is_available() -> bool {
|
||||
let os_version_sysctl_name = match std::ffi::CString::new("kern.osproductversion") {
|
||||
Ok(c_str) => c_str,
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
// Big enough buffer to get a version output as an ASCII string
|
||||
const OUTPUT_BUFFER_SIZE: usize = 64;
|
||||
let mut output_buffer_size = OUTPUT_BUFFER_SIZE;
|
||||
let mut output_buffer = [0u8; OUTPUT_BUFFER_SIZE];
|
||||
let res = unsafe {
|
||||
libc::sysctlbyname(
|
||||
os_version_sysctl_name.as_ptr() as *const _ as *const _,
|
||||
&mut output_buffer as *mut _ as *mut _,
|
||||
&mut output_buffer_size as *mut _ as *mut _,
|
||||
std::ptr::null_mut(),
|
||||
0,
|
||||
)
|
||||
};
|
||||
|
||||
if res != 0 {
|
||||
return false;
|
||||
}
|
||||
|
||||
let result_c_str =
|
||||
match std::ffi::CStr::from_bytes_with_nul(&output_buffer[..output_buffer_size]) {
|
||||
Ok(c_str) => c_str,
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
let result_string = match result_c_str.to_str() {
|
||||
Ok(str) => str,
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
// Normally we get a major version and minor version
|
||||
let split_string: Vec<&str> = result_string.split('.').collect();
|
||||
|
||||
let mut major = -1;
|
||||
let mut minor = -1;
|
||||
|
||||
// Major part of the version string
|
||||
if !split_string.is_empty() {
|
||||
major = match split_string[0].parse() {
|
||||
Ok(major_from_str) => major_from_str,
|
||||
_ => return false,
|
||||
};
|
||||
}
|
||||
|
||||
// SecRandomCopyBytes is available starting with mac OS 10.7
|
||||
// https://developer.apple.com/documentation/security/1399291-secrandomcopybytes?language=objc
|
||||
// This match pattern is recommended by clippy, so we oblige here
|
||||
match major.cmp(&10) {
|
||||
Ordering::Greater => true,
|
||||
Ordering::Equal => {
|
||||
// Minor part of the version string
|
||||
if split_string.len() >= 2 {
|
||||
minor = match split_string[1].parse() {
|
||||
Ok(minor_from_str) => minor_from_str,
|
||||
_ => return false,
|
||||
};
|
||||
}
|
||||
minor >= 7
|
||||
}
|
||||
Ordering::Less => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::seeders::generic_tests::check_seeder_fixed_sequences_different;
|
||||
|
||||
#[test]
|
||||
fn check_bounded_sequence_difference() {
|
||||
check_seeder_fixed_sequences_different(|_| AppleSecureEnclaveSeeder);
|
||||
}
|
||||
}
|
||||
14
concrete-csprng/src/seeders/implem/mod.rs
Normal file
14
concrete-csprng/src/seeders/implem/mod.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
#[cfg(target_os = "macos")]
|
||||
mod apple_secure_enclave_seeder;
|
||||
#[cfg(target_os = "macos")]
|
||||
pub use apple_secure_enclave_seeder::AppleSecureEnclaveSeeder;
|
||||
|
||||
#[cfg(feature = "seeder_x86_64_rdseed")]
|
||||
mod rdseed;
|
||||
#[cfg(feature = "seeder_x86_64_rdseed")]
|
||||
pub use rdseed::RdseedSeeder;
|
||||
|
||||
#[cfg(feature = "seeder_unix")]
|
||||
mod unix;
|
||||
#[cfg(feature = "seeder_unix")]
|
||||
pub use unix::UnixSeeder;
|
||||
51
concrete-csprng/src/seeders/implem/rdseed.rs
Normal file
51
concrete-csprng/src/seeders/implem/rdseed.rs
Normal file
@@ -0,0 +1,51 @@
|
||||
use crate::seeders::{Seed, Seeder};
|
||||
|
||||
/// A seeder which uses the `rdseed` x86_64 instruction.
|
||||
///
|
||||
/// The `rdseed` instruction allows to deliver seeds from a hardware source of entropy see
|
||||
/// <https://www.felixcloutier.com/x86/rdseed> .
|
||||
pub struct RdseedSeeder;
|
||||
|
||||
impl Seeder for RdseedSeeder {
|
||||
fn seed(&mut self) -> Seed {
|
||||
Seed(unsafe { rdseed_random_m128() })
|
||||
}
|
||||
|
||||
fn is_available() -> bool {
|
||||
is_x86_feature_detected!("rdseed")
|
||||
}
|
||||
}
|
||||
|
||||
// Generates a random 128 bits value from rdseed
|
||||
#[target_feature(enable = "rdseed")]
|
||||
unsafe fn rdseed_random_m128() -> u128 {
|
||||
let mut rand1: u64 = 0;
|
||||
let mut rand2: u64 = 0;
|
||||
let mut output_bytes = [0u8; 16];
|
||||
unsafe {
|
||||
loop {
|
||||
if core::arch::x86_64::_rdseed64_step(&mut rand1) == 1 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
loop {
|
||||
if core::arch::x86_64::_rdseed64_step(&mut rand2) == 1 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
output_bytes[0..8].copy_from_slice(&rand1.to_ne_bytes());
|
||||
output_bytes[8..16].copy_from_slice(&rand2.to_ne_bytes());
|
||||
u128::from_ne_bytes(output_bytes)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::seeders::generic_tests::check_seeder_fixed_sequences_different;
|
||||
|
||||
#[test]
|
||||
fn check_bounded_sequence_difference() {
|
||||
check_seeder_fixed_sequences_different(|_| RdseedSeeder);
|
||||
}
|
||||
}
|
||||
72
concrete-csprng/src/seeders/implem/unix.rs
Normal file
72
concrete-csprng/src/seeders/implem/unix.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use crate::seeders::{Seed, Seeder};
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
|
||||
/// A seeder which uses the `/dev/random` source on unix-like systems.
|
||||
pub struct UnixSeeder {
|
||||
counter: u128,
|
||||
secret: u128,
|
||||
file: File,
|
||||
}
|
||||
|
||||
impl UnixSeeder {
|
||||
/// Creates a new seeder from a user defined secret.
|
||||
///
|
||||
/// Important:
|
||||
/// ----------
|
||||
///
|
||||
/// This secret is used to ensure the quality of the seed in scenarios where `/dev/random` may
|
||||
/// be compromised.
|
||||
///
|
||||
/// The attack hypotheses are as follow:
|
||||
/// - `/dev/random` output can be predicted by a process running on the machine by just
|
||||
/// observing various states of the machine
|
||||
/// - The attacker cannot read data from the process where `concrete-csprng` is running
|
||||
///
|
||||
/// Using a secret in `concrete-csprng` allows to generate values that the attacker cannot
|
||||
/// predict, making this seeder secure on systems were `/dev/random` outputs can be
|
||||
/// predicted.
|
||||
pub fn new(secret: u128) -> UnixSeeder {
|
||||
let file = std::fs::File::open("/dev/random").expect("Failed to open /dev/random .");
|
||||
let counter = std::time::UNIX_EPOCH
|
||||
.elapsed()
|
||||
.expect("Failed to initialize unix seeder.")
|
||||
.as_nanos();
|
||||
UnixSeeder {
|
||||
secret,
|
||||
counter,
|
||||
file,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Seeder for UnixSeeder {
|
||||
fn seed(&mut self) -> Seed {
|
||||
let output = self.secret ^ self.counter ^ dev_random(&mut self.file);
|
||||
self.counter = self.counter.wrapping_add(1);
|
||||
Seed(output)
|
||||
}
|
||||
|
||||
fn is_available() -> bool {
|
||||
cfg!(target_family = "unix")
|
||||
}
|
||||
}
|
||||
|
||||
fn dev_random(random: &mut File) -> u128 {
|
||||
let mut buf = [0u8; 16];
|
||||
random
|
||||
.read_exact(&mut buf[..])
|
||||
.expect("Failed to read from /dev/random .");
|
||||
u128::from_ne_bytes(buf)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::seeders::generic_tests::check_seeder_fixed_sequences_different;
|
||||
|
||||
#[test]
|
||||
fn check_bounded_sequence_difference() {
|
||||
check_seeder_fixed_sequences_different(UnixSeeder::new);
|
||||
}
|
||||
}
|
||||
47
concrete-csprng/src/seeders/mod.rs
Normal file
47
concrete-csprng/src/seeders/mod.rs
Normal file
@@ -0,0 +1,47 @@
|
||||
//! A module containing seeders objects.
|
||||
//!
|
||||
//! When initializing a generator, one needs to provide a [`Seed`], which is then used as key to the
|
||||
//! AES blockcipher. As a consequence, the quality of the outputs of the generator is directly
|
||||
//! conditioned by the quality of this seed. This module proposes different mechanisms to deliver
|
||||
//! seeds that can accommodate varying scenarios.
|
||||
|
||||
/// A seed value, used to initialize a generator.
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub struct Seed(pub u128);
|
||||
|
||||
/// A trait representing a seeding strategy.
|
||||
pub trait Seeder {
|
||||
/// Generates a new seed.
|
||||
fn seed(&mut self) -> Seed;
|
||||
|
||||
/// Check whether the seeder can be used on the current machine. This function may check if some
|
||||
/// required CPU features are available or if some OS features are available for example.
|
||||
fn is_available() -> bool
|
||||
where
|
||||
Self: Sized;
|
||||
}
|
||||
|
||||
mod implem;
|
||||
pub use implem::*;
|
||||
|
||||
#[cfg(test)]
|
||||
mod generic_tests {
|
||||
use crate::seeders::Seeder;
|
||||
|
||||
/// Naively verifies that two fixed-size sequences generated by repeatedly calling the seeder
|
||||
/// are different.
|
||||
#[allow(unused)] // to please clippy when tests are not activated
|
||||
pub fn check_seeder_fixed_sequences_different<S: Seeder, F: Fn(u128) -> S>(
|
||||
construct_seeder: F,
|
||||
) {
|
||||
const SEQUENCE_SIZE: usize = 500;
|
||||
const REPEATS: usize = 10_000;
|
||||
for i in 0..REPEATS {
|
||||
let mut seeder = construct_seeder(i as u128);
|
||||
let orig_seed = seeder.seed();
|
||||
for _ in 0..SEQUENCE_SIZE {
|
||||
assert_ne!(seeder.seed(), orig_seed);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -28,14 +28,12 @@ RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > install-rustup.s
|
||||
chmod +x install-rustup.sh && \
|
||||
./install-rustup.sh -y --default-toolchain "${RUST_TOOLCHAIN}" \
|
||||
-c rust-src -t wasm32-unknown-unknown && \
|
||||
curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf > install-wasm-pack.sh && \
|
||||
chmod +x install-wasm-pack.sh && \
|
||||
. "$HOME/.cargo/env" && \
|
||||
./install-wasm-pack.sh -y && \
|
||||
cargo install wasm-pack && \
|
||||
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh > install-node.sh && \
|
||||
chmod +x install-node.sh && \
|
||||
./install-node.sh && \
|
||||
. "$HOME/.nvm/nvm.sh" && \
|
||||
bash -i -c 'nvm install node && nvm use node'
|
||||
bash -i -c 'nvm install 20 && nvm use 20'
|
||||
|
||||
WORKDIR /tfhe-wasm-tests/tfhe-rs/
|
||||
|
||||
@@ -40,7 +40,7 @@ mkdir -p "${TFHE_BUILD_DIR}"
|
||||
|
||||
cd "${TFHE_BUILD_DIR}"
|
||||
|
||||
cmake .. -DCMAKE_BUILD_TYPE=RELEASE
|
||||
cmake .. -DCMAKE_BUILD_TYPE=RELEASE -DCARGO_PROFILE="${CARGO_PROFILE}"
|
||||
|
||||
make -j
|
||||
|
||||
@@ -56,4 +56,4 @@ if [[ $(uname) == "Darwin" ]]; then
|
||||
fi
|
||||
|
||||
# Let's go parallel
|
||||
ARGS="-j$("${nproc_bin}")" make test
|
||||
ARGS="-j$(${nproc_bin})" make test
|
||||
|
||||
@@ -2,18 +2,20 @@
|
||||
|
||||
set -e
|
||||
|
||||
CURR_DIR="$(dirname "$0")"
|
||||
REL_CARGO_TOML_PATH="${CURR_DIR}/../tfhe/Cargo.toml"
|
||||
MIN_RUST_VERSION="$(grep rust-version "${REL_CARGO_TOML_PATH}" | cut -d '=' -f 2 | xargs)"
|
||||
|
||||
function usage() {
|
||||
echo "$0: check minimum cargo version"
|
||||
echo
|
||||
echo "--help Print this message"
|
||||
echo "--rust-toolchain The toolchain to check the version for with leading"
|
||||
echo "--min-rust-version Check toolchain version is >= to this version, default is 1.65"
|
||||
echo "--min-rust-version Check toolchain version is >= to this version, default is ${MIN_RUST_VERSION}"
|
||||
echo
|
||||
}
|
||||
|
||||
RUST_TOOLCHAIN=""
|
||||
# We set the default rust version 1.65 which is the minimum version required for stable GATs
|
||||
MIN_RUST_VERSION="1.65"
|
||||
|
||||
while [ -n "$1" ]
|
||||
do
|
||||
|
||||
77
scripts/dieharder_test.sh
Executable file
77
scripts/dieharder_test.sh
Executable file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# dieharder does not support running a subset of its tests, so we'll check which ones are not good
|
||||
# and ignore the output from those tests in the final log
|
||||
|
||||
set -e
|
||||
|
||||
DIEHARDER_RUN_LOG_FILE="dieharder_run.log"
|
||||
|
||||
bad_tests="$(dieharder -l | \
|
||||
# select lines with the -d
|
||||
grep -w '\-d' | \
|
||||
# forget about the good tests
|
||||
grep -v -i 'good' | \
|
||||
# get the test id
|
||||
cut -d ' ' -f 4 | \
|
||||
# nice formatting
|
||||
xargs)"
|
||||
|
||||
|
||||
bad_test_filter=""
|
||||
for bad_test in ${bad_tests}; do
|
||||
bad_test_filter="${bad_test_filter:+${bad_test_filter}|}$(dieharder -d "${bad_test}" -t 1 -p 1 -D test_name | xargs)"
|
||||
done
|
||||
|
||||
echo "The following tests will be ignored as they are marked as either 'suspect' or 'do not use': "
|
||||
echo ""
|
||||
echo "${bad_test_filter}"
|
||||
echo ""
|
||||
|
||||
# by default we may have no pv just forward the input
|
||||
pv="cat"
|
||||
if which pv > /dev/null; then
|
||||
pv="pv -t -a -b"
|
||||
fi
|
||||
|
||||
rm -f "${DIEHARDER_RUN_LOG_FILE}"
|
||||
|
||||
# ignore potential errors and parse the log afterwards
|
||||
set +e
|
||||
|
||||
# We are writing in both cases
|
||||
# shellcheck disable=SC2094
|
||||
./target/release/examples/generate 2>"${DIEHARDER_RUN_LOG_FILE}" | \
|
||||
$pv | \
|
||||
# -a: all tests
|
||||
# -g 200: get random bytes from input
|
||||
# -Y 1: disambiguate results, i.e. if a weak result appear check if it's a random failure/weakness
|
||||
# -k 2: better maths formulas to determine some test statistics
|
||||
dieharder -a -g 200 -Y 1 -k 2 | \
|
||||
tee -a "${DIEHARDER_RUN_LOG_FILE}"
|
||||
set -e
|
||||
|
||||
printf "\n\n"
|
||||
|
||||
cat "${DIEHARDER_RUN_LOG_FILE}"
|
||||
|
||||
if ! grep -q -i "failed" < "${DIEHARDER_RUN_LOG_FILE}"; then
|
||||
echo "All tests passed!"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
printf "\n\n"
|
||||
|
||||
failed_tests="$(grep -i "failed" < "${DIEHARDER_RUN_LOG_FILE}")"
|
||||
true_failed_test="$(grep -i "failed" < "${DIEHARDER_RUN_LOG_FILE}" | { grep -v -E "${bad_test_filter}" || true; } | sed -z '$ s/\n$//')"
|
||||
|
||||
if [[ "${true_failed_test}" == "" ]]; then
|
||||
echo "There were test failures, but the tests were either marked as 'suspect' or 'do not use'"
|
||||
echo "${failed_tests}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "The following tests failed:"
|
||||
echo "${true_failed_test}"
|
||||
|
||||
exit 1
|
||||
@@ -2,6 +2,65 @@
|
||||
|
||||
set -e
|
||||
|
||||
function usage() {
|
||||
echo "$0: shortint test runner"
|
||||
echo
|
||||
echo "--help Print this message"
|
||||
echo "--rust-toolchain The toolchain to run the tests with default: stable"
|
||||
echo "--multi-bit Run multi-bit tests only: default off"
|
||||
echo "--cargo-profile The cargo profile used to build tests"
|
||||
echo "--avx512-support Set to ON to enable avx512"
|
||||
echo
|
||||
}
|
||||
|
||||
RUST_TOOLCHAIN="+stable"
|
||||
multi_bit=""
|
||||
not_multi_bit="_multi_bit"
|
||||
cargo_profile="release"
|
||||
avx512_feature=""
|
||||
|
||||
while [ -n "$1" ]
|
||||
do
|
||||
case "$1" in
|
||||
"--help" | "-h" )
|
||||
usage
|
||||
exit 0
|
||||
;;
|
||||
|
||||
"--rust-toolchain" )
|
||||
shift
|
||||
RUST_TOOLCHAIN="$1"
|
||||
;;
|
||||
|
||||
"--multi-bit" )
|
||||
multi_bit="_multi_bit"
|
||||
not_multi_bit=""
|
||||
;;
|
||||
|
||||
"--cargo-profile" )
|
||||
shift
|
||||
cargo_profile="$1"
|
||||
;;
|
||||
|
||||
"--avx512-support" )
|
||||
shift
|
||||
if [[ "$1" == "ON" ]]; then
|
||||
avx512_feature=nightly-avx512
|
||||
fi
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "Unknown param : $1"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
if [[ "${RUST_TOOLCHAIN::1}" != "+" ]]; then
|
||||
RUST_TOOLCHAIN="+${RUST_TOOLCHAIN}"
|
||||
fi
|
||||
|
||||
CURR_DIR="$(dirname "$0")"
|
||||
ARCH_FEATURE="$("${CURR_DIR}/get_arch_feature.sh")"
|
||||
|
||||
@@ -25,59 +84,91 @@ else
|
||||
fi
|
||||
|
||||
if [[ "${BIG_TESTS_INSTANCE}" != TRUE ]]; then
|
||||
# block pbs are too slow for high params
|
||||
# mul_crt_4_4 is extremely flaky (~80% failure)
|
||||
# test_wopbs_bivariate_crt_wopbs_param_message generate tables that are too big at the moment
|
||||
# test_integer_smart_mul_param_message_4_carry_4 is too slow
|
||||
filter_expression=''\
|
||||
'test(/^integer::.*$/)'\
|
||||
'and not test(/.*_block_pbs(_base)?_param_message_[34]_carry_[34]$/)'\
|
||||
'and not test(~mul_crt_param_message_4_carry_4)'\
|
||||
'and not test(/.*test_wopbs_bivariate_crt_wopbs_param_message_[34]_carry_[34]$/)'\
|
||||
'and not test(/.*test_integer_smart_mul_param_message_4_carry_4$/)'
|
||||
if [[ "${FAST_TESTS}" != TRUE ]]; then
|
||||
# block pbs are too slow for high params
|
||||
# mul_crt_4_4 is extremely flaky (~80% failure)
|
||||
# test_wopbs_bivariate_crt_wopbs_param_message generate tables that are too big at the moment
|
||||
# test_integer_smart_mul_param_message_4_carry_4_ks_pbs is too slow
|
||||
# so is test_integer_default_add_sequence_multi_thread_param_message_4_carry_4_ks_pbs
|
||||
filter_expression="""\
|
||||
test(/^integer::.*${multi_bit}/) \
|
||||
${not_multi_bit:+"and not test(~${not_multi_bit})"} \
|
||||
and not test(/.*_block_pbs(_base)?_param_message_[34]_carry_[34]_ks_pbs$/) \
|
||||
and not test(~mul_crt_param_message_4_carry_4_ks_pbs) \
|
||||
and not test(/.*test_wopbs_bivariate_crt_wopbs_param_message_[34]_carry_[34]_ks_pbs$/) \
|
||||
and not test(/.*test_integer_smart_mul_param_message_4_carry_4_ks_pbs$/) \
|
||||
and not test(/.*test_integer_default_add_sequence_multi_thread_param_message_4_carry_4_ks_pbs$/)"""
|
||||
else
|
||||
# test only fast default operations with only two set of parameters
|
||||
filter_expression="""\
|
||||
test(/^integer::.*${multi_bit}/) \
|
||||
${not_multi_bit:+"and not test(~${not_multi_bit})"} \
|
||||
and test(/.*_default_.*?_param${multi_bit}_message_[2-3]_carry_[2-3]${multi_bit:+"_group_2"}_ks_pbs/) \
|
||||
and not test(/.*_param_message_[14]_carry_[14]_ks_pbs$/) \
|
||||
and not test(/.*default_add_sequence_multi_thread_param_message_3_carry_3_ks_pbs$/)"""
|
||||
fi
|
||||
|
||||
cargo ${1:+"${1}"} nextest run \
|
||||
cargo "${RUST_TOOLCHAIN}" nextest run \
|
||||
--tests \
|
||||
--release \
|
||||
--cargo-profile "${cargo_profile}" \
|
||||
--package tfhe \
|
||||
--profile ci \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache,"${avx512_feature}" \
|
||||
--test-threads "${n_threads}" \
|
||||
-E "$filter_expression"
|
||||
|
||||
cargo ${1:+"${1}"} test \
|
||||
--release \
|
||||
--package tfhe \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache \
|
||||
--doc \
|
||||
integer::
|
||||
if [[ "${multi_bit}" == "" ]]; then
|
||||
cargo "${RUST_TOOLCHAIN}" test \
|
||||
--profile "${cargo_profile}" \
|
||||
--package tfhe \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache,"${avx512_feature}" \
|
||||
--doc \
|
||||
-- integer::
|
||||
fi
|
||||
else
|
||||
# block pbs are too slow for high params
|
||||
# mul_crt_4_4 is extremely flaky (~80% failure)
|
||||
# test_wopbs_bivariate_crt_wopbs_param_message generate tables that are too big at the moment
|
||||
# test_integer_smart_mul_param_message_4_carry_4 is too slow
|
||||
filter_expression=''\
|
||||
'test(/^integer::.*$/)'\
|
||||
'and not test(/.*_block_pbs(_base)?_param_message_[34]_carry_[34]$/)'\
|
||||
'and not test(~mul_crt_param_message_4_carry_4)'\
|
||||
'and not test(/.*test_wopbs_bivariate_crt_wopbs_param_message_[34]_carry_[34]$/)'\
|
||||
'and not test(/.*test_integer_smart_mul_param_message_4_carry_4$/)'
|
||||
if [[ "${FAST_TESTS}" != TRUE ]]; then
|
||||
# block pbs are too slow for high params
|
||||
# mul_crt_4_4 is extremely flaky (~80% failure)
|
||||
# test_wopbs_bivariate_crt_wopbs_param_message generate tables that are too big at the moment
|
||||
# test_integer_smart_mul_param_message_4_carry_4_ks_pbs is too slow
|
||||
# so is test_integer_default_add_sequence_multi_thread_param_message_4_carry_4_ks_pbs
|
||||
filter_expression="""\
|
||||
test(/^integer::.*${multi_bit}/) \
|
||||
${not_multi_bit:+"and not test(~${not_multi_bit})"} \
|
||||
and not test(/.*_block_pbs(_base)?_param_message_[34]_carry_[34]_ks_pbs$/) \
|
||||
and not test(~mul_crt_param_message_4_carry_4_ks_pbs) \
|
||||
and not test(/.*test_wopbs_bivariate_crt_wopbs_param_message_[34]_carry_[34]_ks_pbs$/) \
|
||||
and not test(/.*test_integer_smart_mul_param_message_4_carry_4_ks_pbs$/) \
|
||||
and not test(/.*test_integer_default_add_sequence_multi_thread_param_message_4_carry_4_ks_pbs$/)"""
|
||||
else
|
||||
# test only fast default operations with only two set of parameters
|
||||
filter_expression="""\
|
||||
test(/^integer::.*${multi_bit}/) \
|
||||
${not_multi_bit:+"and not test(~${not_multi_bit})"} \
|
||||
and test(/.*_default_.*?_param${multi_bit}_message_[2-3]_carry_[2-3]${multi_bit:+"_group_2"}_ks_pbs/) \
|
||||
and not test(/.*_param_message_[14]_carry_[14]_ks_pbs$/) \
|
||||
and not test(/.*default_add_sequence_multi_thread_param_message_3_carry_3_ks_pbs$/)"""
|
||||
fi
|
||||
|
||||
cargo ${1:+"${1}"} nextest run \
|
||||
num_cpu_threads="$(${nproc_bin})"
|
||||
num_threads=$((num_cpu_threads * 1 / 2))
|
||||
cargo "${RUST_TOOLCHAIN}" nextest run \
|
||||
--tests \
|
||||
--release \
|
||||
--cargo-profile "${cargo_profile}" \
|
||||
--package tfhe \
|
||||
--profile ci \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache \
|
||||
--test-threads "$(${nproc_bin})" \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache,"${avx512_feature}" \
|
||||
--test-threads $num_threads \
|
||||
-E "$filter_expression"
|
||||
|
||||
cargo ${1:+"${1}"} test \
|
||||
--release \
|
||||
--package tfhe \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache \
|
||||
--doc \
|
||||
integer:: -- --test-threads="$(${nproc_bin})"
|
||||
if [[ "${multi_bit}" == "" ]]; then
|
||||
cargo "${RUST_TOOLCHAIN}" test \
|
||||
--profile "${cargo_profile}" \
|
||||
--package tfhe \
|
||||
--features="${ARCH_FEATURE}",integer,internal-keycache,"${avx512_feature}" \
|
||||
--doc \
|
||||
-- --test-threads="$(${nproc_bin})" integer::
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Test ran in $SECONDS seconds"
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user