chore: Move to the mono repo layout

This commit is contained in:
Quentin Bourgerie
2023-03-08 11:23:21 +01:00
parent 4fb476aaec
commit ce7eddc22d
201 changed files with 0 additions and 0 deletions

View File

@@ -0,0 +1,5 @@
## Summary
_Please fill this in with information about the main features of this release, or the main reason for making a delivery (e.g., fixing an annoying bug)_
## Links

View File

@@ -0,0 +1,40 @@
"""Helper script for github actions to combine job statuses"""
import argparse
import json
# Job results ordered from most to least severe: the combined status of a
# set of jobs is the most severe (lowest level) result among them.
_RESULT_SEVERITY_ORDER = ("failure", "cancelled", "success", "skipped")

# github result string -> display level (0 is most severe).
RESULTS_TO_DISPLAY_LEVEL = {
    result: level for level, result in enumerate(_RESULT_SEVERITY_ORDER)
}
# Reverse lookup: display level -> github result string.
DISPLAY_LEVEL_TO_RESULTS = dict(enumerate(_RESULT_SEVERITY_ORDER))
def main(args):
    """Print the combined status of the jobs in the github "needs" context.

    Reads the json-serialized needs context (each job object carries a
    "result" string) and prints the most severe result among all jobs.
    """
    with open(args.needs_context_json, encoding="utf-8") as json_file:
        jobs = json.load(json_file)
    # The most severe result has the lowest display level.
    levels = [RESULTS_TO_DISPLAY_LEVEL[job["result"]] for job in jobs.values()]
    print(DISPLAY_LEVEL_TO_RESULTS[min(levels)])
if __name__ == "__main__":
    # CLI: expects the github actions "needs" context serialized to a json
    # file (e.g. the output of `toJSON(needs)` in the workflow).
    parser = argparse.ArgumentParser("Combine github actions statuses", allow_abbrev=False)
    parser.add_argument(
        "--needs_context_json",
        type=str,
        help="Pass the json file path containing the workflow needs context",
    )
    cli_args = parser.parse_args()
    main(cli_args)

View File

@@ -0,0 +1,12 @@
#!/usr/bin/env bash
# Render the global coverage json report (path given as $1) into
# diff-coverage.txt, which is later posted as a PR comment.

# Propagate failures through pipelines, but do not abort on command errors
# (set +e): report formatting is best-effort.
set -o pipefail
set +e

# Directory containing this script, so the helper is found from any CWD.
CURR_DIR=$(dirname "$0")

# Format diff-coverage.txt for PR comment
poetry run python "$CURR_DIR"/coverage_report_format.py \
    global-coverage \
    --global-coverage-json-file "$1" \
    --global-coverage-output-file diff-coverage.txt

View File

@@ -0,0 +1,74 @@
"""Helper script for github actions"""
import argparse
import json
from pathlib import Path
def write_coverage_file(coverage_file_path: Path, exit_code: int, coverage_content):
    """Write the formatted coverage report to a file.

    Args:
        coverage_file_path (Path): destination file.
        exit_code (int): coverage tool exit code; 0 means coverage passed.
        coverage_content: iterable of report lines to embed in the details.
    """
    header = "## Coverage passed ✅\n\n" if exit_code == 0 else "## Coverage failed ❌\n\n"
    with open(coverage_file_path, "w", encoding="utf-8") as out:
        out.write(header)
        # Collapsible section keeps the PR comment compact.
        out.write("<details><summary>Coverage details</summary>\n<p>\n\n")
        out.write("```\n")
        out.writelines(coverage_content)
        out.write("```\n\n")
        out.write("</p>\n</details>\n\n")
def diff_coverage(args):
    """diff-coverage entry point: reformat raw diff-cover output.

    Note: the formatted report is written back over the input file itself.
    """
    report_path = Path(args.diff_cover_output).resolve()
    with open(report_path, "r", encoding="utf-8") as report_file:
        raw_lines = report_file.readlines()
    write_coverage_file(report_path, args.diff_cover_exit_code, raw_lines)
def global_coverage(args):
    """global-coverage entry point: format a pre-computed coverage json.

    The json carries the coverage "exit_code" and the report "content".
    """
    source_path = Path(args.global_coverage_json_file).resolve()
    with open(source_path, "r", encoding="utf-8") as source_file:
        infos = json.load(source_file)
    destination_path = Path(args.global_coverage_output_file).resolve()
    write_coverage_file(destination_path, infos["exit_code"], infos["content"])
def main(args):
    """Entry point: dispatch to the handler installed by the chosen sub-parser."""
    entry_point = args.entry_point
    entry_point(args)
if __name__ == "__main__":
    main_parser = argparse.ArgumentParser(allow_abbrev=False)
    # NOTE(review): dest="sub-command" contains a dash, so the selected
    # sub-command name is only reachable via getattr(args, "sub-command").
    # Dispatch goes through entry_point instead, so the dest is unused.
    sub_parsers = main_parser.add_subparsers(dest="sub-command", required=True)
    # "diff-coverage": reformat raw diff-cover output using its exit code.
    parser_diff_coverage = sub_parsers.add_parser("diff-coverage")
    parser_diff_coverage.add_argument("--diff-cover-exit-code", type=int, required=True)
    parser_diff_coverage.add_argument("--diff-cover-output", type=str, required=True)
    parser_diff_coverage.set_defaults(entry_point=diff_coverage)
    # "global-coverage": format a pre-computed global coverage json report.
    parser_global_coverage = sub_parsers.add_parser("global-coverage")
    parser_global_coverage.add_argument("--global-coverage-output-file", type=str, required=True)
    parser_global_coverage.add_argument("--global-coverage-json-file", type=str, required=True)
    parser_global_coverage.set_defaults(entry_point=global_coverage)
    cli_args = main_parser.parse_args()
    main(cli_args)

View File

@@ -0,0 +1,95 @@
"""Script to generate custom GitHub actions test matrices."""
import argparse
import itertools
import json
from pathlib import Path
# Build-type identifiers accepted via --build-type.
WEEKLY = "weekly"
RELEASE = "release"
PR = "pr"
PUSH_TO_MAIN = "push_to_main"
# OS kind identifiers (keys of the per-configuration "os" dicts below).
LINUX = "linux"
MACOS = "macos"
OSES = {LINUX, MACOS}
# PR builds: fastest feedback — single OS, single python version.
PR_OSES = {LINUX: "ubuntu-22.04"}
PR_PYTHON_VERSIONS = ["3.7"]
PR_CONF = {"os": PR_OSES, "python": PR_PYTHON_VERSIONS}
# Push-to-main builds: same minimal configuration as PRs.
PUSH_TO_MAIN_OSES = {LINUX: "ubuntu-22.04"}
PUSH_TO_MAIN_PYTHON_VERSIONS = ["3.7"]
PUSH_TO_MAIN_CONF = {"os": PUSH_TO_MAIN_OSES, "python": PUSH_TO_MAIN_PYTHON_VERSIONS}
# Weekly builds: full OS x python matrix.
WEEKLY_OSES = {
    LINUX: "ubuntu-22.04",
    MACOS: "macos-11",
}
WEEKLY_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
WEEKLY_CONF = {"os": WEEKLY_OSES, "python": WEEKLY_PYTHON_VERSIONS}
# The OSes here are to indicate the OSes used for runners during release
RELEASE_OSES = {
    LINUX: "ubuntu-22.04",
    # TODO: https://github.com/zama-ai/concrete-numpy-internal/issues/1340
    # Re-enable macOS for release once we have the duration of the tests
    # MACOS: "macos-10.15",
}
# The python versions will be used to build packages during release
RELEASE_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
RELEASE_CONF = {"os": RELEASE_OSES, "python": RELEASE_PYTHON_VERSIONS}
# Dispatch table: build type -> matrix configuration.
CONFIGURATIONS = {
    PR: PR_CONF,
    WEEKLY: WEEKLY_CONF,
    RELEASE: RELEASE_CONF,
    PUSH_TO_MAIN: PUSH_TO_MAIN_CONF,
}
def main(args):
    """Emit the GHA runner matrix for the requested build type.

    Builds the cross product of the configured OSes and python versions,
    prints it for the CI logs and writes it as json for later steps.
    """
    conf = CONFIGURATIONS[args.build_type]
    matrix = [
        {
            "os_kind": os_kind,
            "runs_on": runner_label,
            "python_version": python_version,
        }
        for (os_kind, runner_label), python_version in itertools.product(
            conf["os"].items(), conf["python"]
        )
    ]
    # Echo the matrix for the logs, then persist it for the workflow.
    print(json.dumps(matrix, indent=4))
    with open(Path(args.output_json).resolve(), "w", encoding="utf-8") as matrix_file:
        json.dump(matrix, matrix_file)
if __name__ == "__main__":
    # CLI: pick one of the known build types and write the matrix as json.
    parser = argparse.ArgumentParser("Generate GHA test matrices", allow_abbrev=False)
    parser.add_argument(
        "--build-type",
        type=str,
        required=True,
        choices=[WEEKLY, RELEASE, PR, PUSH_TO_MAIN],
        help="The type of build for which the matrix generation is required",
    )
    parser.add_argument(
        "--output-json", type=str, required=True, help="Where to output the matrix as json data"
    )
    cli_args = parser.parse_args()
    main(cli_args)

View File

@@ -0,0 +1,163 @@
"""Tool to manage the versions.html file at the root of our docs sites."""
import argparse
from pathlib import Path
from bs4 import BeautifulSoup
from bs4.element import Tag
from semver import VersionInfo
VERSIONS_LIST_ID = "versions-list"
def strip_leading_v(version_str: str):
    """Drop a leading "v" (e.g. "v1.2.3") so the string can be parsed as SemVer."""
    if version_str.startswith("v"):
        return version_str[1:]
    return version_str
def create_list_element(soup: BeautifulSoup, contents: Tag) -> Tag:
    """Wrap *contents* in a sidebar list item.

    Args:
        soup (BeautifulSoup): The soup to use to create the tag.
        contents (Tag): The tag to place inside the list item.

    Returns:
        Tag: tag containing <li class="toctree-l1"></li> wrapping *contents*.
    """
    list_item = soup.new_tag("li", **{"class": "toctree-l1"})
    list_item.contents.append(contents)
    return list_item
def create_link_tag_set_string(soup: BeautifulSoup, version_string: str) -> Tag:
    """Create a link tag pointing at the docs of the given version.

    Args:
        soup (BeautifulSoup): The soup to use to create the tag.
        version_string (str): The version string to use.

    Returns:
        Tag: tag containing <a class="reference internal" href="0.1.0/">{version_string}</a>.
    """
    attributes = {
        "href": f"{version_string}/",
        "class": "reference internal",
    }
    link_tag = soup.new_tag("a", **attributes)
    link_tag.string = version_string
    return link_tag
def main(args):
    """Entry point: insert new version links into versions.html.

    Loads the html, splits the existing <li> entries into SemVer and
    non-SemVer (e.g. "stable"/"main") groups, appends the new versions and
    rewrites the list: non-SemVer entries first (alphabetical), then SemVer
    entries newest-first.
    """
    # Reject any requested version that is not valid SemVer (an optional
    # leading "v" is tolerated for validation).
    invalid_versions = [
        version
        for version in args.add_versions
        if not VersionInfo.isvalid(strip_leading_v(version))
    ]
    if len(invalid_versions) > 0:
        message = f"Found invalid versions: {invalid_versions}"
        raise RuntimeError(message)
    version_html = None
    version_html_file_path = Path(args.versions_html_file).resolve()
    with open(version_html_file_path, "r", encoding="utf-8") as f:
        version_html = BeautifulSoup(f, "html.parser")
    if version_html is None:
        message = f"An error occured while trying to load {str(version_html_file_path)}"
        raise RuntimeError(message)
    print(version_html)
    # The <ul id="versions-list"> element holds one <li> per version.
    version_list = version_html.find(id=VERSIONS_LIST_ID)
    if version_list is None or version_list.name != "ul":
        message = f"Could not find <ul> tag with id {VERSIONS_LIST_ID}"
        raise RuntimeError(message)
    non_semver_versions = {}
    semver_versions = {}
    for list_entry in version_list.find_all("li"):
        version_tags = []
        version_is_valid_semver = False
        for potential_version_tag in list_entry.contents:
            if not isinstance(potential_version_tag, Tag):
                continue
            # NOTE(review): only the validity of the LAST tag survives the
            # loop; acceptable while each <li> holds exactly one <a>,
            # which the assert below enforces.
            version_is_valid_semver = VersionInfo.isvalid(
                strip_leading_v(potential_version_tag.string)
            )
            version_tags.append(potential_version_tag.string)
        num_version_tags = len(version_tags)
        assert num_version_tags == 1, f"Can only have 1 version tag, got {num_version_tags}"
        version_tag = version_tags[0]
        # NOTE(review): version_tag is already the .string of the <a> tag
        # (a NavigableString); taking .string on it again looks suspicious
        # — confirm it round-trips to the same text.
        if version_is_valid_semver:
            semver_versions[version_tag.string] = list_entry
        else:
            non_semver_versions[version_tag.string] = list_entry
    # NOTE(review): parse() receives the raw version, not the
    # strip_leading_v'd one — a "v"-prefixed version passes validation
    # above but would raise here; confirm intended input format.
    parsed_versions = [VersionInfo.parse(version) for version in args.add_versions]
    # NOTE(review): this intersects VersionInfo objects with string keys;
    # unless they hash/compare equal it is always empty — verify the
    # duplicate detection actually fires.
    versions_already_in_html = set(parsed_versions).intersection(semver_versions.keys())
    if len(versions_already_in_html) > 0:
        message = (
            "The following versions are already in the html: "
            f"{', '.join(str(ver) for ver in sorted(versions_already_in_html))}"
        )
        raise RuntimeError(message)
    # Build one <li><a ...></a></li> entry per newly added version.
    semver_versions.update(
        (
            parsed_version,
            create_list_element(
                version_html, create_link_tag_set_string(version_html, str(parsed_version))
            ),
        )
        for parsed_version in parsed_versions
    )
    # NOTE(review): semver_versions now mixes str keys (existing entries)
    # with VersionInfo keys (new entries); sorted() below may raise
    # TypeError on the mixed key types — confirm.
    version_list.contents = []
    for sorted_non_semver_version in sorted(non_semver_versions.keys()):
        version_list.contents.append(non_semver_versions[sorted_non_semver_version])
    # We want the most recent versions at the top
    for sorted_semver_version in sorted(semver_versions.keys(), reverse=True):
        version_list.contents.append(semver_versions[sorted_semver_version])
    pretty_output = version_html.prettify()
    print(pretty_output)
    output_html_path = Path(args.output_html).resolve()
    with open(output_html_path, "w", encoding="utf-8") as f:
        f.write(pretty_output)
if __name__ == "__main__":
    # CLI: add one or more version links to a docs versions.html file.
    parser = argparse.ArgumentParser("versions.html generator", allow_abbrev=False)
    parser.add_argument(
        "--add-versions",
        type=str,
        required=True,
        nargs="+",
        help="A list of versions to add to versions.html. "
        "The links will be sorted by versions with stable/main as the first entry. "
        "The link will point to '$VERSION/' and will have text '$VERSION'.",
    )
    parser.add_argument(
        "--versions-html-file",
        type=str,
        required=True,
        # Fix: main() requires a <ul> with this id (it rejects anything whose
        # tag name is not "ul"); the help text wrongly said <li>.
        help="Path to the versions.html to update. "
        'It must have a <ul> tag with id="versions-list".',
    )
    parser.add_argument("--output-html", type=str, required=True, help="Output file path.")
    cli_args = parser.parse_args()
    main(cli_args)

View File

@@ -0,0 +1,81 @@
"""Tool to manage the versions.json file at the root of our docs sites."""
import argparse
import json
from json.decoder import JSONDecodeError
from pathlib import Path
from semver import VersionInfo
def strip_leading_v(version_str: str):
    """Drop a leading "v" (e.g. "v1.2.3") so the string can be parsed as SemVer."""
    if version_str.startswith("v"):
        return version_str[1:]
    return version_str
def main(args):
    """Add *args.version* to a docs versions.json file.

    The version is appended to "menu" (if absent), to "all" unless it is a
    pre-release, and stored as "latest" when requested; the updated json is
    printed and written to args.output_json.

    Raises:
        RuntimeError: if the version is not valid SemVer or the input json
            cannot be parsed.
    """
    version = args.version
    latest = args.latest
    prerelease = args.prerelease
    if not VersionInfo.isvalid(strip_leading_v(version)):
        message = f"Invalid version: {version}"
        raise RuntimeError(message)
    version_json_file_path = Path(args.versions_json_file).resolve()
    try:
        with open(version_json_file_path, "r", encoding="utf-8") as f:
            version_json = json.loads(f.read())
    except JSONDecodeError as err:
        message = f"An error occurred while trying to load {str(version_json_file_path)}"
        raise RuntimeError(message) from err
    # Version json is composed by:
    # all: list of all published versions
    # menu: list of all available versions (if any entry is not included in "all",
    # warning banner with DEV/PRE-RELEASE doc warning will be displayed)
    # latest: latest version, if current doc != latest, warning banner is displayed
    # Bug fix: the original tested the literal string "version" instead of
    # the version being added, so the membership check never did its job and
    # duplicates could pile up in "menu".
    if version not in version_json["menu"]:
        version_json["menu"].append(version)
    if not prerelease:
        # NOTE(review): "all" is appended unconditionally; re-running for the
        # same version duplicates it there — confirm whether that is desired.
        version_json["all"].append(version)
    if latest:
        version_json["latest"] = version
    print(version_json)
    output_json_path = Path(args.output_json).resolve()
    with open(output_json_path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, indent=4)
if __name__ == "__main__":
    # CLI: add a single version entry to a docs versions.json file.
    parser = argparse.ArgumentParser("versions.json generator", allow_abbrev=False)
    parser.add_argument(
        "--add-version",
        type=str,
        required=True,
        dest="version",
        # Fix: typo in user-facing help ("A single versions" -> "A single version").
        help="A single version to add to versions.json. "
        "The link will point to '$VERSION/' and will have text '$VERSION'.",
    )
    parser.add_argument(
        "--versions-json-file", type=str, required=True, help="Path to the versions.json to update."
    )
    parser.add_argument(
        "--prerelease",
        action="store_true",
        dest="prerelease",
        help="set this version as a pre-release documentation.",
    )
    parser.add_argument(
        "--latest",
        action="store_true",
        dest="latest",
        help="set this version as latest available documentation.",
    )
    parser.add_argument("--output-json", type=str, required=True, help="Output file path.")
    cli_args = parser.parse_args()
    main(cli_args)

View File

@@ -0,0 +1,61 @@
#!/bin/bash
# Find the head sha of the most recent successful workflow run on the main
# branch across a set of github event types, and echo it on stdout.

TOKEN=
ORG_REPO=
EVENTS_TO_CHECK=

# Parse CLI flags: --token (github API token), --org-repo ("org/repo"),
# --event-types (space-separated list of event names).
while [ -n "$1" ]
do
case "$1" in
"--token" )
shift
TOKEN="$1"
;;
"--org-repo" )
shift
ORG_REPO="$1"
;;
"--event-types" )
shift
EVENTS_TO_CHECK="$1"
;;
*)
echo "Unknown param : $1"
exit 1
;;
esac
shift
done

# Store the workflows that come in jsons in a file per event type
# (each file holds the most recently updated successful run for that event).
# NOTE(review): `mktemp --suffix` is GNU coreutils; it does not exist on
# stock macOS mktemp — confirm this only runs on linux runners.
declare -a JSON_FILES_ARRAY=()
for EVENT in $EVENTS_TO_CHECK; do
CURR_FILE="$(mktemp --suffix=.json)"
curl \
-X GET \
-H "Accept: application/vnd.github.v3+json" \
-H "Authorization: token ${TOKEN}" \
"https://api.github.com/repos/${ORG_REPO}/actions/runs?branch=main&event=${EVENT}&status=success" | \
jq -rc '.workflow_runs | sort_by(.updated_at)[-1]' > "${CURR_FILE}"
JSON_FILES_ARRAY+=("${CURR_FILE}")
done

# Put all the workflows in the same json and dump that
CONCAT_FILE="$(mktemp --suffix=.json)"
jq -sr '.' "${JSON_FILES_ARRAY[@]}" > "${CONCAT_FILE}"

# Sort by updated_at, get the last and get the sha1 for this last one
BEFORE_SHA=$(jq -rc 'sort_by(.updated_at)[-1].head_sha' "${CONCAT_FILE}")

# Remove files
rm "${CONCAT_FILE}"
for FILE_TO_RM in "${JSON_FILES_ARRAY[@]}"; do
rm "${FILE_TO_RM}"
done

# Echo for the outside world
echo "${BEFORE_SHA}"

View File

@@ -0,0 +1,65 @@
"""Script to parse output of pip-audit"""
import argparse
import json
import sys
from pathlib import Path
from typing import List
def format_vulnerability(pkg_name, pkg_version, vuln_info: dict) -> List[str]:
    """Format vulnerability entries as human-readable report lines.

    Args:
        pkg_name: name of the vulnerable package.
        pkg_version: installed version of the package.
        vuln_info (dict): vulnerability entries, each carrying an "id" and
            a list of "fix_versions".

    Returns:
        List[str]: one formatted line per vulnerability.
    """
    formatted = []
    for vuln in vuln_info:
        fixes = ", ".join(vuln["fix_versions"])
        formatted.append(f"{pkg_name}({pkg_version}) - ID: {vuln['id']} fixed in {fixes}")
    return formatted
# Cannot have a backslash in f-string, so create a constant for newline
# (f-string expressions could not contain "\" before Python 3.12).
NEW_LINE = "\n"
def main(args):
    """Turn pip-audit json output into a human-readable report file.

    Exits with status 1 whenever the pip-audit output is non-empty so the
    CI job fails when vulnerabilities were reported.
    """
    vulns_json_path = Path(args.vulns_json).resolve()
    with open(vulns_json_path, "r", encoding="utf-8") as vulns_file:
        json_lines = vulns_file.readlines()
    report_path = Path(args.vulns_report).resolve()
    with open(report_path, "w", encoding="utf-8") as report:
        if not json_lines:
            report.write("No vulnerabilities found.\n")
            return
        report.write("Found the following vulnerabilities:\n")
        # pip-audit emits its whole json document on a single line.
        assert len(json_lines) == 1
        for entry in json.loads(json_lines[0]):
            vuln_entries = entry.get("vulns", [])
            if not vuln_entries:
                continue
            formatted_vulns = format_vulnerability(
                entry["name"], entry["version"], vuln_entries
            )
            report.write(f"- {f'{NEW_LINE}- '.join(formatted_vulns)}\n")
        sys.exit(1)
if __name__ == "__main__":
    # CLI: read the pip-audit json output and write a readable report.
    parser = argparse.ArgumentParser("pip-audit output parser", allow_abbrev=False)
    parser.add_argument(
        "--vulns-json", type=str, required=True, help="The path to the pip-audit json output"
    )
    parser.add_argument(
        "--vulns-report",
        type=str,
        required=True,
        # Fix: typo in user-facing help ("vulneratbility" -> "vulnerability").
        help="Path to the file to which to write the vulnerability report",
    )
    cli_args = parser.parse_args()
    main(cli_args)