mirror of https://github.com/zama-ai/concrete.git

chore: Move to the mono repo layout
@@ -0,0 +1,5 @@
## Summary

_Please fill in information about the main features in this release, or the main reason for having a delivery (e.g., fixing an annoying bug)._

## Links
@@ -0,0 +1,40 @@
"""Helper script for github actions to combine job statuses"""
import argparse
import json


RESULTS_TO_DISPLAY_LEVEL = {
    "failure": 0,
    "cancelled": 1,
    "success": 2,
    "skipped": 3,
}

DISPLAY_LEVEL_TO_RESULTS = {val: key for key, val in RESULTS_TO_DISPLAY_LEVEL.items()}


def main(args):
    """Entry point"""

    need_context_data = None
    with open(args.needs_context_json, encoding="utf-8") as f:
        need_context_data = json.load(f)

    # The combined status is the worst one among the jobs:
    # a single failure (level 0) outweighs any number of successes
    display_level = min(
        RESULTS_TO_DISPLAY_LEVEL[job_object["result"]] for job_object in need_context_data.values()
    )

    print(DISPLAY_LEVEL_TO_RESULTS[display_level])


if __name__ == "__main__":
    parser = argparse.ArgumentParser("Combine github actions statuses", allow_abbrev=False)

    parser.add_argument(
        "--needs_context_json",
        type=str,
        help="Pass the json file path containing the workflow needs context",
    )

    cli_args = parser.parse_args()

    main(cli_args)
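A minimal usage sketch, assuming this script lives next to the other actions utilities (this first diff does not show its file name). In a workflow, the `needs` context can be dumped to JSON with the standard `toJSON` expression and fed to the script:

```bash
# Hypothetical workflow step; the script path is an assumption.
echo '${{ toJSON(needs) }}' > needs_context.json
python script/actions_utils/combine_statuses.py --needs_context_json needs_context.json
# Prints "failure" if any needed job failed, otherwise "cancelled", "success", or "skipped".
```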
12 frontends/concrete-python/script/actions_utils/coverage.sh Executable file
@@ -0,0 +1,12 @@
#!/usr/bin/env bash

set -o pipefail
set +e

CURR_DIR=$(dirname "$0")

# Format diff-coverage.txt for PR comment
poetry run python "$CURR_DIR"/coverage_report_format.py \
    global-coverage \
    --global-coverage-json-file "$1" \
    --global-coverage-output-file diff-coverage.txt
74 frontends/concrete-python/script/actions_utils/coverage_report_format.py Executable file
@@ -0,0 +1,74 @@
"""Helper script for github actions"""
import argparse
import json
from pathlib import Path


def write_coverage_file(coverage_file_path: Path, exit_code: int, coverage_content):
    """Write the formatted coverage to file."""
    with open(coverage_file_path, "w", encoding="utf-8") as f:
        if exit_code == 0:
            f.write("## Coverage passed ✅\n\n")
        else:
            f.write("## Coverage failed ❌\n\n")

        # Open collapsible section
        f.write("<details><summary>Coverage details</summary>\n<p>\n\n")
        f.write("```\n")

        f.writelines(coverage_content)

        # Close collapsible section
        f.write("```\n\n")
        f.write("</p>\n</details>\n\n")


def diff_coverage(args):
    """diff-coverage entry point."""
    diff_cover_file_path = Path(args.diff_cover_output).resolve()
    diff_cover_content = None

    with open(diff_cover_file_path, "r", encoding="utf-8") as f:
        diff_cover_content = f.readlines()

    write_coverage_file(diff_cover_file_path, args.diff_cover_exit_code, diff_cover_content)


def global_coverage(args):
    """global-coverage entry point."""
    global_coverage_json_path = Path(args.global_coverage_json_file).resolve()
    global_coverage_infos = None
    with open(global_coverage_json_path, "r", encoding="utf-8") as f:
        global_coverage_infos = json.load(f)

    exit_code = global_coverage_infos["exit_code"]
    coverage_content = global_coverage_infos["content"]
    global_coverage_output_file_path = Path(args.global_coverage_output_file).resolve()
    write_coverage_file(global_coverage_output_file_path, exit_code, coverage_content)


def main(args):
    """Entry point"""
    args.entry_point(args)


if __name__ == "__main__":
    main_parser = argparse.ArgumentParser(allow_abbrev=False)

    sub_parsers = main_parser.add_subparsers(dest="sub-command", required=True)

    parser_diff_coverage = sub_parsers.add_parser("diff-coverage")

    parser_diff_coverage.add_argument("--diff-cover-exit-code", type=int, required=True)
    parser_diff_coverage.add_argument("--diff-cover-output", type=str, required=True)
    parser_diff_coverage.set_defaults(entry_point=diff_coverage)

    parser_global_coverage = sub_parsers.add_parser("global-coverage")

    parser_global_coverage.add_argument("--global-coverage-output-file", type=str, required=True)
    parser_global_coverage.add_argument("--global-coverage-json-file", type=str, required=True)
    parser_global_coverage.set_defaults(entry_point=global_coverage)

    cli_args = main_parser.parse_args()

    main(cli_args)
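A sketch of the `global-coverage` input this script expects; the JSON shape follows the two keys the code reads (`exit_code` and `content`), and the file names are illustrative:

```bash
cat > global-coverage.json <<'EOF'
{"exit_code": 0, "content": ["Name    Stmts   Miss  Cover\nTOTAL      10      0   100%\n"]}
EOF
python coverage_report_format.py \
    global-coverage \
    --global-coverage-json-file global-coverage.json \
    --global-coverage-output-file diff-coverage.txt
# diff-coverage.txt now starts with "## Coverage passed ✅" and a collapsible details block.
```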
@@ -0,0 +1,95 @@
"""Script to generate custom GitHub actions test matrices."""

import argparse
import itertools
import json
from pathlib import Path

WEEKLY = "weekly"
RELEASE = "release"
PR = "pr"
PUSH_TO_MAIN = "push_to_main"

LINUX = "linux"
MACOS = "macos"

OSES = {LINUX, MACOS}

PR_OSES = {LINUX: "ubuntu-22.04"}
PR_PYTHON_VERSIONS = ["3.7"]
PR_CONF = {"os": PR_OSES, "python": PR_PYTHON_VERSIONS}

PUSH_TO_MAIN_OSES = {LINUX: "ubuntu-22.04"}
PUSH_TO_MAIN_PYTHON_VERSIONS = ["3.7"]
PUSH_TO_MAIN_CONF = {"os": PUSH_TO_MAIN_OSES, "python": PUSH_TO_MAIN_PYTHON_VERSIONS}

WEEKLY_OSES = {
    LINUX: "ubuntu-22.04",
    MACOS: "macos-11",
}
WEEKLY_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
WEEKLY_CONF = {"os": WEEKLY_OSES, "python": WEEKLY_PYTHON_VERSIONS}

# The OSes here indicate the OSes used for runners during release
RELEASE_OSES = {
    LINUX: "ubuntu-22.04",
    # TODO: https://github.com/zama-ai/concrete-numpy-internal/issues/1340
    # Re-enable macOS for release once we have the duration of the tests
    # MACOS: "macos-10.15",
}
# The python versions will be used to build packages during release
RELEASE_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"]
RELEASE_CONF = {"os": RELEASE_OSES, "python": RELEASE_PYTHON_VERSIONS}

CONFIGURATIONS = {
    PR: PR_CONF,
    WEEKLY: WEEKLY_CONF,
    RELEASE: RELEASE_CONF,
    PUSH_TO_MAIN: PUSH_TO_MAIN_CONF,
}


def main(args):
    """Entry point."""

    matrix_conf = CONFIGURATIONS[args.build_type]

    github_action_matrix = []

    # Cartesian product of the configured OSes and Python versions
    for (os_kind, os_name), python_version in itertools.product(
        matrix_conf["os"].items(), matrix_conf["python"]
    ):
        github_action_matrix.append(
            {
                "os_kind": os_kind,
                "runs_on": os_name,
                "python_version": python_version,
            }
        )

    print(json.dumps(github_action_matrix, indent=4))

    output_json_path = Path(args.output_json).resolve()

    with open(output_json_path, "w", encoding="utf-8") as f:
        json.dump(github_action_matrix, f)


if __name__ == "__main__":
    parser = argparse.ArgumentParser("Generate GHA test matrices", allow_abbrev=False)

    parser.add_argument(
        "--build-type",
        type=str,
        required=True,
        choices=[WEEKLY, RELEASE, PR, PUSH_TO_MAIN],
        help="The type of build for which the matrix generation is required",
    )

    parser.add_argument(
        "--output-json", type=str, required=True, help="Where to output the matrix as json data"
    )

    cli_args = parser.parse_args()

    main(cli_args)
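A usage sketch (the script file name is an assumption; the diff does not show it):

```bash
python generate_test_matrix.py --build-type weekly --output-json matrix.json
# matrix.json holds the cartesian product, e.g.:
# [{"os_kind": "linux", "runs_on": "ubuntu-22.04", "python_version": "3.7"}, ...,
#  {"os_kind": "macos", "runs_on": "macos-11", "python_version": "3.10"}]
```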
@@ -0,0 +1,163 @@
"""Tool to manage the versions.html file at the root of our docs sites."""

import argparse
from pathlib import Path

from bs4 import BeautifulSoup
from bs4.element import Tag
from semver import VersionInfo

VERSIONS_LIST_ID = "versions-list"


def strip_leading_v(version_str: str):
    """Strip the leading v of a version, which is not SemVer compatible."""
    return version_str[1:] if version_str.startswith("v") else version_str


def create_list_element(soup: BeautifulSoup, contents: Tag) -> Tag:
    """Create a list element for links.

    Args:
        soup (BeautifulSoup): The soup to use to create the tag.
        contents (Tag): The tag to wrap in the list element.

    Returns:
        Tag: tag containing <li class="toctree-l1"></li>.
    """
    new_list_element = soup.new_tag("li", **{"class": "toctree-l1"})
    new_list_element.contents.append(contents)
    return new_list_element


def create_link_tag_set_string(soup: BeautifulSoup, version_string: str) -> Tag:
    """Create a link tag on the given soup to the version specified by version_string.

    Args:
        soup (BeautifulSoup): The soup to use to create the tag.
        version_string (str): The version string to use.

    Returns:
        Tag: tag containing <a class="reference internal" href="0.1.0/">{version_string}</a>.
    """
    new_tag = soup.new_tag(
        "a",
        **{
            "href": f"{version_string}/",
            "class": "reference internal",
        },
    )

    new_tag.string = version_string
    return new_tag


def main(args):
    """Entry point."""

    invalid_versions = [
        version
        for version in args.add_versions
        if not VersionInfo.isvalid(strip_leading_v(version))
    ]
    if len(invalid_versions) > 0:
        message = f"Found invalid versions: {invalid_versions}"
        raise RuntimeError(message)

    version_html = None
    version_html_file_path = Path(args.versions_html_file).resolve()
    with open(version_html_file_path, "r", encoding="utf-8") as f:
        version_html = BeautifulSoup(f, "html.parser")

    if version_html is None:
        message = f"An error occurred while trying to load {str(version_html_file_path)}"
        raise RuntimeError(message)

    print(version_html)

    version_list = version_html.find(id=VERSIONS_LIST_ID)
    if version_list is None or version_list.name != "ul":
        message = f"Could not find <ul> tag with id {VERSIONS_LIST_ID}"
        raise RuntimeError(message)

    non_semver_versions = {}
    semver_versions = {}
    for list_entry in version_list.find_all("li"):
        version_tags = []
        version_is_valid_semver = False
        for potential_version_tag in list_entry.contents:
            if not isinstance(potential_version_tag, Tag):
                continue
            version_is_valid_semver = VersionInfo.isvalid(
                strip_leading_v(potential_version_tag.string)
            )
            version_tags.append(potential_version_tag.string)

        num_version_tags = len(version_tags)
        assert num_version_tags == 1, f"Can only have 1 version tag, got {num_version_tags}"

        version_tag = version_tags[0]

        if version_is_valid_semver:
            semver_versions[version_tag.string] = list_entry
        else:
            non_semver_versions[version_tag.string] = list_entry

    parsed_versions = [
        VersionInfo.parse(strip_leading_v(version)) for version in args.add_versions
    ]

    versions_already_in_html = set(parsed_versions).intersection(semver_versions.keys())
    if len(versions_already_in_html) > 0:
        message = (
            "The following versions are already in the html: "
            f"{', '.join(str(ver) for ver in sorted(versions_already_in_html))}"
        )
        raise RuntimeError(message)

    semver_versions.update(
        (
            parsed_version,
            create_list_element(
                version_html, create_link_tag_set_string(version_html, str(parsed_version))
            ),
        )
        for parsed_version in parsed_versions
    )

    version_list.contents = []
    for sorted_non_semver_version in sorted(non_semver_versions.keys()):
        version_list.contents.append(non_semver_versions[sorted_non_semver_version])

    # We want the most recent versions at the top
    for sorted_semver_version in sorted(semver_versions.keys(), reverse=True):
        version_list.contents.append(semver_versions[sorted_semver_version])

    pretty_output = version_html.prettify()
    print(pretty_output)

    output_html_path = Path(args.output_html).resolve()
    with open(output_html_path, "w", encoding="utf-8") as f:
        f.write(pretty_output)


if __name__ == "__main__":
    parser = argparse.ArgumentParser("versions.html generator", allow_abbrev=False)

    parser.add_argument(
        "--add-versions",
        type=str,
        required=True,
        nargs="+",
        help="A list of versions to add to versions.html. "
        "The links will be sorted by versions with stable/main as the first entry. "
        "The link will point to '$VERSION/' and will have text '$VERSION'.",
    )
    parser.add_argument(
        "--versions-html-file",
        type=str,
        required=True,
        help="Path to the versions.html to update. "
        'It must have a <ul> tag with id="versions-list".',
    )
    parser.add_argument("--output-html", type=str, required=True, help="Output file path.")

    cli_args = parser.parse_args()
    main(cli_args)
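A usage sketch (the script file name is an assumption); the input HTML must contain a `<ul id="versions-list">` element:

```bash
python versions_html.py \
    --add-versions 1.2.0 \
    --versions-html-file docs/versions.html \
    --output-html docs/versions.html
```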
@@ -0,0 +1,81 @@
"""Tool to manage the versions.json file at the root of our docs sites."""

import argparse
import json
from json.decoder import JSONDecodeError
from pathlib import Path

from semver import VersionInfo


def strip_leading_v(version_str: str):
    """Strip the leading v of a version, which is not SemVer compatible."""
    return version_str[1:] if version_str.startswith("v") else version_str


def main(args):
    """Entry point."""
    version = args.version
    latest = args.latest
    prerelease = args.prerelease

    if not VersionInfo.isvalid(strip_leading_v(version)):
        message = f"Invalid version: {version}"
        raise RuntimeError(message)

    version_json_file_path = Path(args.versions_json_file).resolve()
    try:
        with open(version_json_file_path, "r", encoding="utf-8") as f:
            version_json = json.loads(f.read())
    except JSONDecodeError as err:
        message = f"An error occurred while trying to load {str(version_json_file_path)}"
        raise RuntimeError(message) from err

    # versions.json is composed of:
    #   all: list of all published versions
    #   menu: list of all available versions (if a "menu" entry is not included in "all",
    #         a warning banner with a DEV/PRE-RELEASE doc warning is displayed)
    #   latest: latest version; if the current doc != latest, a warning banner is displayed
    if version not in version_json["menu"]:
        version_json["menu"].append(version)
    if not prerelease:
        version_json["all"].append(version)
    if latest:
        version_json["latest"] = version

    print(version_json)
    output_json_path = Path(args.output_json).resolve()
    with open(output_json_path, "w", encoding="utf-8") as f:
        json.dump(version_json, f, indent=4)


if __name__ == "__main__":
    parser = argparse.ArgumentParser("versions.json generator", allow_abbrev=False)

    parser.add_argument(
        "--add-version",
        type=str,
        required=True,
        dest="version",
        help="A single version to add to versions.json. "
        "The link will point to '$VERSION/' and will have text '$VERSION'.",
    )
    parser.add_argument(
        "--versions-json-file", type=str, required=True, help="Path to the versions.json to update."
    )
    parser.add_argument(
        "--prerelease",
        action="store_true",
        dest="prerelease",
        help="Set this version as pre-release documentation.",
    )
    parser.add_argument(
        "--latest",
        action="store_true",
        dest="latest",
        help="Set this version as the latest available documentation.",
    )
    parser.add_argument("--output-json", type=str, required=True, help="Output file path.")

    cli_args = parser.parse_args()
    main(cli_args)
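A sketch of marking 1.2.0 as the new latest release (the script file name is an assumption); the JSON shape follows the keys the script manipulates:

```bash
cat > versions.json <<'EOF'
{"all": ["1.1.0"], "menu": ["1.1.0"], "latest": "1.1.0"}
EOF
python versions_json.py --add-version 1.2.0 --latest \
    --versions-json-file versions.json --output-json versions.json
# 1.2.0 is appended to "all" and "menu" and becomes "latest".
```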
61 frontends/concrete-python/script/actions_utils/get_latest_run.sh Executable file
@@ -0,0 +1,61 @@
#!/bin/bash

TOKEN=
ORG_REPO=
EVENTS_TO_CHECK=

while [ -n "$1" ]
do
    case "$1" in
        "--token" )
            shift
            TOKEN="$1"
            ;;

        "--org-repo" )
            shift
            ORG_REPO="$1"
            ;;

        "--event-types" )
            shift
            EVENTS_TO_CHECK="$1"
            ;;

        *)
            echo "Unknown param : $1"
            exit 1
            ;;
    esac
    shift
done

# Store the workflow runs, which come as jsons, in one file per event type
declare -a JSON_FILES_ARRAY=()
for EVENT in $EVENTS_TO_CHECK; do
    CURR_FILE="$(mktemp --suffix=.json)"
    curl \
        -X GET \
        -H "Accept: application/vnd.github.v3+json" \
        -H "Authorization: token ${TOKEN}" \
        "https://api.github.com/repos/${ORG_REPO}/actions/runs?branch=main&event=${EVENT}&status=success" | \
        jq -rc '.workflow_runs | sort_by(.updated_at)[-1]' > "${CURR_FILE}"
    JSON_FILES_ARRAY+=("${CURR_FILE}")
done

# Put all the workflow runs in the same json and dump that
CONCAT_FILE="$(mktemp --suffix=.json)"
jq -sr '.' "${JSON_FILES_ARRAY[@]}" > "${CONCAT_FILE}"

# Sort by updated_at, take the last entry and get its head sha1
BEFORE_SHA=$(jq -rc 'sort_by(.updated_at)[-1].head_sha' "${CONCAT_FILE}")

# Remove files
rm "${CONCAT_FILE}"

for FILE_TO_RM in "${JSON_FILES_ARRAY[@]}"; do
    rm "${FILE_TO_RM}"
done

# Echo for the outside world
echo "${BEFORE_SHA}"
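A usage sketch; the token needs read access to the repository's Actions API:

```bash
./get_latest_run.sh \
    --token "${GITHUB_TOKEN}" \
    --org-repo zama-ai/concrete \
    --event-types "push schedule"
# Prints the head SHA of the most recent successful run on main across the given events.
```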
@@ -0,0 +1,65 @@
"""Script to parse output of pip-audit"""

import argparse
import json
import sys
from pathlib import Path
from typing import List


def format_vulnerability(pkg_name, pkg_version, vuln_info: List[dict]) -> List[str]:
    """Format a vulnerability info."""

    vuln_strs = [
        f"{pkg_name}({pkg_version}) - ID: {vuln['id']} "
        f"fixed in {', '.join(vuln['fix_versions'])}"
        for vuln in vuln_info
    ]
    return vuln_strs


# Cannot have a backslash in an f-string, so create a constant for newline
NEW_LINE = "\n"


def main(args):
    """Entry point"""

    vulns_json_path = Path(args.vulns_json).resolve()
    json_content = []
    with open(vulns_json_path, "r", encoding="utf-8") as f:
        json_content.extend(f.readlines())

    report_path = Path(args.vulns_report).resolve()
    with open(report_path, "w", encoding="utf-8") as report:
        if json_content:
            report.write("Found the following vulnerabilities:\n")
            assert len(json_content) == 1
            json_data = json.loads(json_content[0])
            for entry in json_data:
                vuln_entries = entry.get("vulns", [])
                if vuln_entries:
                    formatted_vulns = format_vulnerability(
                        entry["name"], entry["version"], vuln_entries
                    )
                    report.write(f"- {f'{NEW_LINE}- '.join(formatted_vulns)}\n")
            sys.exit(1)
        else:
            report.write("No vulnerabilities found.\n")


if __name__ == "__main__":
    parser = argparse.ArgumentParser("pip-audit output parser", allow_abbrev=False)

    parser.add_argument(
        "--vulns-json", type=str, required=True, help="The path to the pip-audit json output"
    )
    parser.add_argument(
        "--vulns-report",
        type=str,
        required=True,
        help="Path to the file to which to write the vulnerability report",
    )

    cli_args = parser.parse_args()
    main(cli_args)
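A usage sketch; pip-audit can emit JSON on a single line, which is what the parser expects: a list of entries shaped like `{"name", "version", "vulns": [{"id", "fix_versions"}]}` (JSON shapes vary across pip-audit versions, so treat this as an assumption, as is the parser's file name):

```bash
pip-audit --format json --output vulns.json || true
python parse_pip_audit.py --vulns-json vulns.json --vulns-report report.txt
```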
@@ -0,0 +1,75 @@
"""Update list of supported functions in the doc."""

import argparse

from concrete.numpy.tracing import Tracer


def main(file_to_update):
    """Update list of supported functions in file_to_update"""
    f_names = sorted(f.__name__.replace("_", "\\_") for f in Tracer.SUPPORTED_NUMPY_OPERATORS)
    supported_func = [
        f"[np.{f}](https://numpy.org/doc/stable/reference/generated/numpy.{f}.html)"
        for f in f_names
    ]

    with open(file_to_update, "r", encoding="utf-8") as file:
        lines = file.readlines()

    newlines = []
    keep_line = True

    for line in lines:
        if line.startswith(
            "<!--- gen_supported_ufuncs.py: inject supported operations [BEGIN] -->"
        ):
            keep_line = False
            newlines.append(line)
            newlines.append(
                "<!--- do not edit, auto generated part by "
                "`python3 gen_supported_ufuncs.py` in docker -->\n"
            )
        elif line.startswith(
            "<!--- do not edit, auto generated part by "
            "`python3 gen_supported_ufuncs.py` in docker -->"
        ):
            pass
        elif line.startswith(
            "<!--- gen_supported_ufuncs.py: inject supported operations [END] -->"
        ):
            keep_line = True

            # Inject the supported functions
            newlines.extend(f"* {f}\n" for f in supported_func)

            newlines.append(line)
        else:
            assert "gen_supported_ufuncs.py" not in line, (
                f"Error: not expected to have 'gen_supported_ufuncs.py' at line {line} "
                f"of {file_to_update}"
            )

            if keep_line:
                newlines.append(line)

    if args.check:
        with open(file_to_update, "r", encoding="utf-8") as file:
            oldlines = file.readlines()

        assert (
            oldlines == newlines
        ), "List of supported functions is not up to date. Please run `make supported_functions`."

    else:
        with open(file_to_update, "w", encoding="utf-8") as file:
            file.writelines(newlines)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Update list of supported functions in the doc")
    parser.add_argument("--check", action="store_true", help="flag to enable just checking mode")

    parser.add_argument("file_to_update", type=str, help=".md file to update")
    args = parser.parse_args()
    main(args.file_to_update)
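A sketch of the markers the script rewrites between, in the target .md file (the doc file name is an assumption):

```bash
cat >> docs/numpy_support.md <<'EOF'
<!--- gen_supported_ufuncs.py: inject supported operations [BEGIN] -->
<!--- gen_supported_ufuncs.py: inject supported operations [END] -->
EOF
python gen_supported_ufuncs.py docs/numpy_support.md          # inject the list in place
python gen_supported_ufuncs.py --check docs/numpy_support.md  # fail if it is out of date
```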
251 frontends/concrete-python/script/make_utils/changelog_helper.py Normal file
@@ -0,0 +1,251 @@
"""Tool to bypass the insane logic of semantic-release and generate changelogs we want"""

import argparse
import subprocess
import sys
from collections import deque

from git.repo import Repo
from semantic_release.changelog import markdown_changelog
from semantic_release.errors import UnknownCommitMessageStyleError
from semantic_release.settings import config, current_commit_parser
from semantic_release.vcs_helpers import get_repository_owner_and_name
from semver import VersionInfo


def log_msg(*args, file=sys.stderr, **kwargs):
    """Shortcut to print to sys.stderr."""
    print(*args, file=file, **kwargs)


def strip_leading_v(version_str: str):
    """Strip the leading v of a version, which is not SemVer compatible."""
    return version_str[1:] if version_str.startswith("v") else version_str


def get_poetry_project_version() -> VersionInfo:
    """Run poetry version and get the project version"""
    command = ["poetry", "version"]
    poetry_version_output = subprocess.check_output(command, text=True)
    return version_string_to_version_info(poetry_version_output.split(" ")[1])


def raise_exception_or_print_warning(is_error: bool, message_body: str):
    """Raise an exception if is_error is true, else print a warning to stderr"""
    msg_start = "Error" if is_error else "Warning"
    msg = f"{msg_start}: {message_body}"
    if is_error:
        raise RuntimeError(msg)
    log_msg(msg)


def version_string_to_version_info(version_string: str) -> VersionInfo:
    """Convert a git tag to VersionInfo."""
    return VersionInfo.parse(strip_leading_v(version_string))


def generate_changelog(repo: Repo, from_commit_excluded: str, to_commit_included: str) -> dict:
    """Recreate the functionality from semantic-release with the from and to commits.

    Args:
        repo (Repo): the gitpython Repo object representing your git repository
        from_commit_excluded (str): the commit after which we want to collect commit messages for
            the changelog
        to_commit_included (str): the last commit included in the collected commit messages for the
            changelog.

    Returns:
        dict: the same formatted dict as the generate_changelog from semantic-release
    """
    # Additional sections will be added as new types are encountered
    changes: dict = {"breaking": []}

    rev = f"{from_commit_excluded}...{to_commit_included}"

    for commit in repo.iter_commits(rev):
        hash_ = commit.hexsha
        commit_message = (
            commit.message.replace("\r\n", "\n")
            if isinstance(commit.message, str)
            else commit.message.replace(b"\r\n", b"\n")
        )
        try:
            message = current_commit_parser()(commit_message)
            if message.type not in changes:
                log_msg(f"Creating new changelog section for {message.type} ")
                changes[message.type] = []

            # Capitalize the first letter of the message, leaving others as they were
            # (using str.capitalize() would make the other letters lowercase)
            formatted_message = message.descriptions[0][0].upper() + message.descriptions[0][1:]
            if config.get("changelog_capitalize") is False:
                formatted_message = message.descriptions[0]

            # By default, feat(x): description shows up in changelog with the
            # scope bolded, like:
            #
            # * **x**: description
            if config.get("changelog_scope") and message.scope:
                formatted_message = f"**{message.scope}:** {formatted_message}"

            changes[message.type].append((hash_, formatted_message))

            if message.breaking_descriptions:
                # Copy breaking change descriptions into changelog
                for paragraph in message.breaking_descriptions:
                    changes["breaking"].append((hash_, paragraph))
            elif message.bump == 3:
                # Major, but no breaking descriptions, use commit subject instead
                changes["breaking"].append((hash_, message.descriptions[0]))

        except UnknownCommitMessageStyleError as err:
            log_msg(f"Ignoring UnknownCommitMessageStyleError: {err}")

    return changes


def main(args):
    """Entry point"""

    repo = Repo(args.repo_root)

    sha1_to_tags = {tag.commit.hexsha: tag for tag in repo.tags}

    to_commit = repo.commit(args.to_ref)
    log_msg(f"To commit: {to_commit}")

    to_tag = sha1_to_tags.get(to_commit.hexsha, None)
    if to_tag is None:
        raise_exception_or_print_warning(
            is_error=args.to_ref_must_have_tag,
            message_body=f"to-ref {args.to_ref} has no tag associated to it",
        )

    to_version = (
        get_poetry_project_version()
        if to_tag is None
        else version_string_to_version_info(to_tag.name)
    )
    log_msg(f"Project version {to_version} taken from tag: {to_tag is not None}")

    from_commit = None
    if args.from_ref is None:
        tags_by_name = {strip_leading_v(tag.name): tag for tag in repo.tags}
        version_infos = {
            VersionInfo.parse(tag_name): tag_name
            for tag_name in tags_by_name
            if VersionInfo.isvalid(tag_name)
        }
        all_release_version_infos = {
            version_info: tags_by_name[tag_name]
            for version_info, tag_name in version_infos.items()
            if version_info.prerelease is None
        }
        log_msg(f"All release versions {all_release_version_infos}")

        versions_before_project_version = [
            version_info for version_info in all_release_version_infos if version_info < to_version
        ]
        if len(versions_before_project_version) > 0:
            highest_version_before_current_version = max(versions_before_project_version)
            highest_version_tag = all_release_version_infos[highest_version_before_current_version]
            from_commit = highest_version_tag.commit
        else:
            # No versions before, get the initial commit reachable from to_commit
            # from https://stackoverflow.com/a/48232574
            last_element_extractor = deque(repo.iter_commits(to_commit), 1)
            from_commit = last_element_extractor.pop()
    else:
        from_commit = repo.commit(args.from_ref)

    log_msg(f"From commit: {from_commit}")
    ancestor_commit = repo.merge_base(to_commit, from_commit)
    assert len(ancestor_commit) == 1
    ancestor_commit = ancestor_commit[0]
    log_msg(f"Common ancestor: {ancestor_commit}")

    if ancestor_commit != from_commit:
        do_not_change_from_ref = args.do_not_change_from_ref and args.from_ref is not None
        raise_exception_or_print_warning(
            is_error=do_not_change_from_ref,
            message_body=(
                f"the ancestor {ancestor_commit} for {from_commit} and {to_commit} "
                f"is not the same commit as the commit for '--from-ref' {from_commit}."
            ),
        )

    ancestor_tag = sha1_to_tags.get(ancestor_commit.hexsha, None)
    if ancestor_tag is None:
        raise_exception_or_print_warning(
            is_error=args.ancestor_must_have_tag,
            message_body=(
                f"the ancestor {ancestor_commit} for {from_commit} and {to_commit} has no tag"
            ),
        )

    ancestor_version_str = (
        None if ancestor_tag is None else str(version_string_to_version_info(ancestor_tag.name))
    )

    log_msg(
        f"Collecting commits from \n{ancestor_commit} "
        f"(tag: {ancestor_tag} - parsed version "
        f"{str(ancestor_version_str)}) to \n{to_commit} "
        f"(tag: {to_tag} - parsed version {str(to_version)})"
    )

    log_dict = generate_changelog(repo, ancestor_commit.hexsha, to_commit.hexsha)

    owner, name = get_repository_owner_and_name()
    md_changelog = markdown_changelog(
        owner,
        name,
        str(to_version),
        log_dict,
        header=True,
        previous_version=ancestor_version_str,
    )

    print(md_changelog)


if __name__ == "__main__":
    parser = argparse.ArgumentParser("Changelog helper", allow_abbrev=False)

    parser.add_argument("--repo-root", type=str, default=".", help="Path to the repo root")
    parser.add_argument(
        "--to-ref",
        type=str,
        help="Specify the git ref-like string (sha1, tag, HEAD~, etc.) that will mark the LAST "
        "included commit of the changelog. If this is not specified, the current project version "
        "will be used to create a changelog with the current commit as last commit.",
    )
    parser.add_argument(
        "--from-ref",
        type=str,
        help="Specify the git ref-like string (sha1, tag, HEAD~, etc.) that will mark the commit "
        "BEFORE the first included commit of the changelog. If this is not specified, the most "
        "recent actual release tag (no pre-releases) before the '--to-ref' argument will be used. "
        "If the tagged commit is not an ancestor of '--to-ref' then the most recent common "
        "ancestor (git merge-base) will be used unless '--do-not-change-from-ref' is specified.",
    )
    parser.add_argument(
        "--ancestor-must-have-tag",
        action="store_true",
        help="Set if the used ancestor must have a tag associated to it.",
    )
    parser.add_argument(
        "--to-ref-must-have-tag",
        action="store_true",
        help="Set if '--to-ref' must have a tag associated to it.",
    )
    parser.add_argument(
        "--do-not-change-from-ref",
        action="store_true",
        help="Specify to prevent selecting a different '--from-ref' than the one specified in "
        "cli. Will raise an exception if '--from-ref' is not a suitable ancestor for '--to-ref' "
        "and would otherwise use the most recent common ancestor (git merge-base) as '--from-ref'.",
    )

    cli_args = parser.parse_args()
    main(cli_args)
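A usage sketch generating the changelog from the previous release tag up to a tagged release:

```bash
python changelog_helper.py --repo-root . --to-ref v1.0.0 --to-ref-must-have-tag > CHANGELOG.md
```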
@@ -0,0 +1,30 @@
"""File to get pylintrc notes"""

import argparse
import configparser
from pathlib import Path


def main(args):
    """Entry point"""

    pylintrc_file_path = Path(args.pylintrc_path).resolve()
    config = configparser.ConfigParser()
    config.read(pylintrc_file_path)
    notes = sorted(x.strip() for x in config["MISCELLANEOUS"]["notes"].split(","))
    # Make sure we at least have todo in there, without writing it literally,
    # otherwise this very script would match when grepping
    notes.append("TO" + "DO")
    notes_for_grep_search = r"\|".join(notes)
    print(notes_for_grep_search)


if __name__ == "__main__":
    parser = argparse.ArgumentParser("Parse pylintrc notes", allow_abbrev=False)

    parser.add_argument(
        "--pylintrc-path", type=str, required=True, help="Path to pylintrc ini config"
    )

    cli_args = parser.parse_args()

    main(cli_args)
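A usage sketch (the script file name is an assumption); the printed pattern is ready for grep's basic-regex alternation:

```bash
NOTES=$(python pylintrc_notes.py --pylintrc-path pylintrc)  # e.g. FIXME\|XXX\|TODO
grep -rn "${NOTES}" src/ && echo "leftover notes found"
```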
44 frontends/concrete-python/script/make_utils/is_latest.py Normal file
@@ -0,0 +1,44 @@
"""
Simple script to check if a given version is the latest version of Concrete Numpy.
"""

import sys
from typing import List

import requests  # type: ignore
from semver import VersionInfo


def is_latest(new_version: VersionInfo, existing_versions: List[VersionInfo]):
    """
    Get if `new_version` is the latest version among `existing_versions`.
    """

    if new_version.prerelease:
        return False

    for existing_version in existing_versions:
        if existing_version.prerelease:
            continue

        if existing_version > new_version:
            return False

    return True


def main():
    """
    Run the script.
    """

    info = requests.get("https://api.github.com/repos/zama-ai/concrete-numpy/releases").json()

    new_version = VersionInfo.parse(sys.argv[1])
    existing_versions = [VersionInfo.parse(releases["name"][1:]) for releases in info]

    print(is_latest(new_version, existing_versions))


if __name__ == "__main__":
    main()
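A usage sketch; the script prints `True` or `False`:

```bash
python is_latest.py 1.2.0
```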
28 frontends/concrete-python/script/make_utils/is_prerelease.py Normal file
@@ -0,0 +1,28 @@
"""
Simple script to check if a given version is a pre-release version.
"""

import sys

from semver import VersionInfo


def is_prerelease(version: VersionInfo):
    """
    Get if `version` is a pre-release version.
    """

    return version.prerelease is not None


def main():
    """
    Run the script.
    """

    version = VersionInfo.parse(sys.argv[1])
    print(str(is_prerelease(version)).lower())


if __name__ == "__main__":
    main()
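A usage sketch; unlike is_latest.py, the output is lowercased, which is handy for workflow conditions:

```bash
python is_prerelease.py 1.2.0-rc.1   # prints "true"
```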
126 frontends/concrete-python/script/make_utils/licenses.sh Executable file
@@ -0,0 +1,126 @@
#!/bin/bash

set -e

BASENAME="licenses"
LICENSE_DIRECTORY="docs"
CHECK=0
DIFF_TOOL="diff --ignore-all-space --ignore-tab-expansion --ignore-space-change --ignore-blank-lines --strip-trailing-cr"
TMP_VENV_PATH="/tmp/tmp_venv"
DO_USER_LICENSES=1

# Dev licenses are not done, but could be re-enabled
DO_DEV_LICENSES=0

OUTPUT_DIRECTORY="${LICENSE_DIRECTORY}"

while [ -n "$1" ]
do
    case "$1" in
        "--check" )
            CHECK=1
            OUTPUT_DIRECTORY=$(mktemp -d)
            ;;

        *)
            echo "Unknown param : $1"
            exit 1
            ;;
    esac
    shift
done

UNAME=$(uname)
if [ "$UNAME" == "Darwin" ]
then
    OS=mac
elif [ "$UNAME" == "Linux" ]
then
    OS=linux
else
    echo "Problem with OS"
    exit 255
fi

if [ $DO_USER_LICENSES -eq 1 ]
then
    # Licenses for users (install in a temporary venv)
    echo "Doing licenses for user"

    FILENAME="${OS}.dependency.${BASENAME}.txt"
    LICENSES_FILENAME="${LICENSE_DIRECTORY}/${FILENAME}"
    NEW_LICENSES_FILENAME="${OUTPUT_DIRECTORY}/${FILENAME}"

    rm -rf $TMP_VENV_PATH/tmp_venv
    python3 -m venv $TMP_VENV_PATH/tmp_venv

    # SC1090: Can't follow non-constant source. Use a directive to specify location.
    # shellcheck disable=SC1090,SC1091
    source $TMP_VENV_PATH/tmp_venv/bin/activate

    python -m pip install -U pip wheel
    python -m pip install -U --force-reinstall setuptools
    poetry install --only main
    python -m pip install pip-licenses
    pip-licenses | grep -v "pkg\-resources\|concrete-numpy" | tee "${NEW_LICENSES_FILENAME}"

    # Remove trailing whitespaces
    if [ "$UNAME" == "Darwin" ]
    then
        sed -i "" 's/[t ]*$//g' "${NEW_LICENSES_FILENAME}"
    else
        sed -i 's/[t ]*$//g' "${NEW_LICENSES_FILENAME}"
    fi

    deactivate

    if [ $CHECK -eq 1 ]
    then
        echo "$DIFF_TOOL $LICENSES_FILENAME ${NEW_LICENSES_FILENAME}"
        $DIFF_TOOL "$LICENSES_FILENAME" "${NEW_LICENSES_FILENAME}"
        echo "Success: no update in $LICENSES_FILENAME"
    fi
fi

if [ $DO_DEV_LICENSES -eq 1 ]
then
    # Licenses for developers (install in a temporary venv)
    echo "Doing licenses for developer"

    FILENAME="${BASENAME}_${OS}_dev.txt"
    LICENSES_FILENAME="${LICENSE_DIRECTORY}/${FILENAME}"
    NEW_LICENSES_FILENAME="${OUTPUT_DIRECTORY}/${FILENAME}"

    rm -rf $TMP_VENV_PATH/tmp_venv
    python3 -m venv $TMP_VENV_PATH/tmp_venv

    # SC1090: Can't follow non-constant source. Use a directive to specify location.
    # shellcheck disable=SC1090,SC1091
    source $TMP_VENV_PATH/tmp_venv/bin/activate

    make setup_env
    pip-licenses | grep -v "pkg\-resources\|concrete-numpy" | tee "${NEW_LICENSES_FILENAME}"

    # Remove trailing whitespaces
    if [ "$UNAME" == "Darwin" ]
    then
        sed -i "" 's/[t ]*$//g' "${NEW_LICENSES_FILENAME}"
    else
        sed -i 's/[t ]*$//g' "${NEW_LICENSES_FILENAME}"
    fi

    deactivate

    if [ $CHECK -eq 1 ]
    then
        echo "$DIFF_TOOL $LICENSES_FILENAME ${NEW_LICENSES_FILENAME}"
        $DIFF_TOOL "$LICENSES_FILENAME" "${NEW_LICENSES_FILENAME}"
        echo "Success: no update in $LICENSES_FILENAME"
    fi
fi

rm -f ${LICENSE_DIRECTORY}/licenses_*.txt.tmp
rm -rf $TMP_VENV_PATH/tmp_venv

echo "End of license script"
7 frontends/concrete-python/script/make_utils/ncpus.sh Executable file
@@ -0,0 +1,7 @@
#!/bin/bash

if [[ $(uname) == "Darwin" ]]; then
    sysctl -n hw.logicalcpu
else
    nproc
fi
95 frontends/concrete-python/script/make_utils/setup_os_deps.sh Executable file
@@ -0,0 +1,95 @@
#!/usr/bin/env bash

# From https://stackoverflow.com/a/69860299
isDocker(){
    local cgroup=/proc/1/cgroup
    test -f $cgroup && [[ "$(<$cgroup)" = *:cpuset:/docker/* ]]
}

isDockerBuildkit(){
    local cgroup=/proc/1/cgroup
    test -f $cgroup && [[ "$(<$cgroup)" = *:cpuset:/docker/buildkit/* ]]
}

isDockerContainer(){
    [[ -e /.dockerenv ]]
}

LINUX_INSTALL_PYTHON=0

while [ -n "$1" ]
do
    case "$1" in
        "--linux-install-python" )
            LINUX_INSTALL_PYTHON=1
            ;;

        *)
            echo "Unknown param : $1"
            exit 1
            ;;
    esac
    shift
done

OS_NAME=$(uname)

if [[ "${OS_NAME}" == "Linux" ]]; then
    # Docker build
    if isDockerBuildkit || (isDocker && ! isDockerContainer); then
        CLEAR_APT_LISTS="rm -rf /var/lib/apt/lists/* &&"
        SUDO_BIN=""
    else
        CLEAR_APT_LISTS=""
        SUDO_BIN="$(command -v sudo)"
        if [[ "${SUDO_BIN}" != "" ]]; then
            SUDO_BIN="${SUDO_BIN} "
        fi
    fi

    PYTHON_PACKAGES=
    if [[ "${LINUX_INSTALL_PYTHON}" == "1" ]]; then
        PYTHON_PACKAGES="python3-pip \
        python3 \
        python3-dev \
        python3-tk \
        python3-venv \
        python-is-python3 \
        "
    fi

    SETUP_CMD="${SUDO_BIN:+$SUDO_BIN}apt-get update && apt-get upgrade --no-install-recommends -y && \
    ${SUDO_BIN:+$SUDO_BIN}apt-get install --no-install-recommends -y \
    build-essential \
    curl \
    sqlite3 \
    ${PYTHON_PACKAGES:+$PYTHON_PACKAGES} \
    git \
    graphviz* \
    jq \
    make \
    pandoc \
    shellcheck && \
    ${CLEAR_APT_LISTS:+$CLEAR_APT_LISTS} \
    pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry"
    eval "${SETUP_CMD}"
elif [[ "${OS_NAME}" == "Darwin" ]]; then
    # Some problems with the git which is preinstalled on AWS virtual machines. Let's unlink it
    # but not fail if it is not there, so use 'cat' as a hack to be sure that, even if set -x is
    # activated later in this script, the status is still 0 == success
    brew unlink git@2.35.1 | cat
    brew install git

    brew install curl graphviz jq make pandoc shellcheck sqlite
    python3 -m pip install -U pip
    python3 -m pip install poetry

    echo "Make is currently installed as gmake"
    echo 'If you need to use it as "make", you can add a "gnubin" directory to your PATH from your bashrc like:'
    # shellcheck disable=SC2016
    echo 'PATH="/usr/local/opt/make/libexec/gnubin:$PATH"'
else
    echo "Unknown OS"
    exit 1
fi
@@ -0,0 +1,143 @@
"""Helper script to be able to test python code in markdown files."""

import argparse
import re
import sys
import traceback
from pathlib import Path
from typing import Dict, List

PYTHON_BLOCK_HINTS = ["py", "python", "python3"]
BLOCK_STARTS = tuple(f"```{hint}" for hint in PYTHON_BLOCK_HINTS)
BLOCK_END = "```"
DIRECTIVE_COMMENT_PATTERN = "<!--pytest-codeblocks:(.*)-->"
SKIP_DIRECTIVE = "skip"
CONT_DIRECTIVE = "cont"


def get_code_blocks_for_file(md_file: Path) -> Dict[int, List[str]]:
    """Function to process an md file and collect the python code blocks in it.

    Args:
        md_file (Path): The path to the md file to convert and test.

    Raises:
        SyntaxError: If EOF is reached before a code block is closed.
        SyntaxError: If a block is not closed and a new python block is opened.

    Returns:
        Dict[int, List[str]]: A dict containing the code blocks of the file.
    """
    file_content = None

    python_code_blocks: Dict[int, List[str]] = {}

    def get_code_block_container(line_idx):
        block_idx = line_idx
        python_code_blocks[block_idx] = []
        return python_code_blocks[block_idx]

    with open(md_file, encoding="utf-8") as f:
        file_content = f.readlines()

    file_content_iterator = iter(enumerate(file_content, 1))
    python_block_continues = False
    skip_next_python_block = False

    for line_idx, line in file_content_iterator:
        if line.startswith(BLOCK_STARTS):
            if skip_next_python_block:
                skip_next_python_block = False
                continue
            if not python_block_continues:
                current_python_code = get_code_block_container(line_idx)
            while True:
                line_idx, line = next(file_content_iterator)
                if line == "":
                    # Reached EOF
                    message = (
                        "Reached EOF before finding the end of the current python block in "
                        f"{str(md_file)}"
                    )
                    raise SyntaxError(message)

                if line.strip() == BLOCK_END:
                    break

                if line.startswith(BLOCK_STARTS):
                    message = (
                        f"Error at line {line_idx} in file {str(md_file)}, "
                        "python block was opened before the previous one was "
                        "closed (missing ``` ?)"
                    )
                    raise SyntaxError(message)
                current_python_code.append(line)
            python_block_continues = False
        else:
            match = re.match(DIRECTIVE_COMMENT_PATTERN, line)
            if match is not None:
                directive = match.group(1)
                if directive == SKIP_DIRECTIVE:
                    skip_next_python_block = True
                elif directive == CONT_DIRECTIVE:
                    python_block_continues = True

            python_block_continues = python_block_continues and not skip_next_python_block

    return python_code_blocks


def main(args):
    """The actual processing."""
    md_dir_path = Path(args.md_dir)
    md_files = sorted(md_dir_path.glob("**/*.md"))

    code_blocks_per_file: Dict[str, Dict[int, List[str]]] = {}

    err_msg = ""

    for md_file in md_files:
        md_file = md_file.resolve().absolute()
        md_file_str = str(md_file)
        # pylint: disable=broad-except
        try:
            code_blocks_per_file[md_file_str] = get_code_blocks_for_file(md_file)
        except Exception:
            err_msg += f"Error while converting {md_file_str}"
            err_msg += traceback.format_exc() + "\n"
        # pylint: enable=broad-except

    for md_file_str, code_blocks in code_blocks_per_file.items():
        for line_idx, python_code in code_blocks.items():
            # pylint: disable=broad-except,exec-used
            try:
                print(f"Testing block starting line #{line_idx} from {md_file_str}")
                python_code = "".join(python_code)
                compiled_code = compile(python_code, filename=md_file_str, mode="exec")
                exec(compiled_code, {"__MODULE__": "__main__"})  # noqa: S102
                print("Success")
            except Exception:
                print("Failed")
                err_msg += (
                    f"Error while testing block starting line #{line_idx} from {md_file_str}:\n"
                )
                err_msg += f"```\n{python_code}```\n"
                err_msg += traceback.format_exc() + "\n"
            # pylint: enable=broad-except,exec-used

    if err_msg != "":
        print(err_msg)
        sys.exit(1)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        "Converts md python blocks to python files", allow_abbrev=False
    )
    parser.add_argument(
        "--md_dir", type=str, help="The path to the dir containing md files to convert."
    )

    cli_args = parser.parse_args()

    main(cli_args)
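A usage sketch (the script file name is an assumption); the two directives the script understands are placed as HTML comments in the .md file:

```bash
#   <!--pytest-codeblocks:skip-->   skip the next python block
#   <!--pytest-codeblocks:cont-->   append the next python block to the previous one
python test_md_blocks.py --md_dir docs/
```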
20 frontends/concrete-python/script/make_utils/upgrade_deps.sh Executable file
@@ -0,0 +1,20 @@
#!/usr/bin/env bash

# verbose output please
set -v

no_dev_file=$(mktemp --suffix=.txt)
all_file=$(mktemp --suffix=.txt)
dev_file=$(mktemp --suffix=.txt)

poetry show -o -t --only main | grep -v -e "--" | cut -d " " -f 1 | sed 's/$/\@latest/g' > "${no_dev_file}"
poetry show -o -t | grep -v -e "--" | cut -d " " -f 1 | sed 's/$/\@latest/g' > "${all_file}"
join -v1 -v2 "${all_file}" "${no_dev_file}" > "${dev_file}"
# shellcheck disable=SC2002
cat "${no_dev_file}" | xargs poetry add
# shellcheck disable=SC2002
cat "${dev_file}" | xargs poetry add --dev

rm "${no_dev_file}"
rm "${dev_file}"
rm "${all_file}"
222 frontends/concrete-python/script/make_utils/version_utils.py Normal file
@@ -0,0 +1,222 @@
"""Tool to manage versions in the project"""

import argparse
import json
import os
import re
import sys
from pathlib import Path
from typing import List, Optional

import tomlkit
from semver import VersionInfo


def strip_leading_v(version_str: str):
    """Strip the leading v of a version, which is not SemVer compatible."""
    return version_str[1:] if version_str.startswith("v") else version_str


def islatest(args):
    """islatest command entry point."""
    print(args, file=sys.stderr)

    # This is the safest default
    result = {"is_latest": False, "is_prerelease": True}

    new_version_str = strip_leading_v(args.new_version)
    if VersionInfo.isvalid(new_version_str):
        new_version_info = VersionInfo.parse(new_version_str)
        if new_version_info.prerelease is None:
            # If it's an actual release
            all_versions_str = (
                strip_leading_v(version_str) for version_str in args.existing_versions
            )

            # Keep versions that are not release candidates
            version_infos = [
                VersionInfo.parse(version_str)
                for version_str in all_versions_str
                if VersionInfo.isvalid(version_str)
            ]
            all_non_prerelease_version_infos = [
                version_info for version_info in version_infos if version_info.prerelease is None
            ]

            all_non_prerelease_version_infos.append(new_version_info)

            new_version_is_latest = max(all_non_prerelease_version_infos) == new_version_info
            result["is_latest"] = new_version_is_latest
            result["is_prerelease"] = False

    print(json.dumps(result))


def update_variable_in_py_file(file_path: Path, var_name: str, version_str: str):
    """Update the version in a .py file."""

    file_content = None
    with open(file_path, encoding="utf-8") as f:
        file_content = f.read()

    updated_file_content = re.sub(
        rf'{var_name} *[:=] *["\'](.+)["\']',
        rf'{var_name} = "{version_str}"',
        file_content,
    )

    with open(file_path, "w", encoding="utf-8", newline="\n") as f:
        f.write(updated_file_content)


def update_variable_in_toml_file(file_path: Path, var_name: str, version_str: str):
    """Update the version in a .toml file."""
    toml_content = None
    with open(file_path, encoding="utf-8") as f:
        toml_content = tomlkit.loads(f.read())

    toml_keys = var_name.split(".")
    current_content = toml_content
    for toml_key in toml_keys[:-1]:
        current_content = current_content[toml_key]
    last_toml_key = toml_keys[-1]
    current_content[last_toml_key] = version_str

    with open(file_path, "w", encoding="utf-8", newline="\n") as f:
        f.write(tomlkit.dumps(toml_content))


def load_file_vars_set(pyproject_path: os.PathLike, cli_file_vars: Optional[List[str]]):
    """Load files and their version variables set up in pyproject.toml and passed as arguments."""

    file_vars_set = set()
    if cli_file_vars is not None:
        file_vars_set.update(cli_file_vars)

    pyproject_path = Path(pyproject_path).resolve()

    # Check if there is a semantic release configuration
    if pyproject_path.exists():
        pyproject_content = None
        with open(pyproject_path, encoding="utf-8") as f:
            pyproject_content = tomlkit.loads(f.read())

        try:
            sr_conf = pyproject_content["tool"]["semantic_release"]
            sr_version_toml: str = sr_conf.get("version_toml", "")
            file_vars_set.update(sr_version_toml.split(","))
            sr_version_variable: str = sr_conf.get("version_variable", "")
            file_vars_set.update(sr_version_variable.split(","))
        except KeyError:
            print("No configuration for semantic release in pyproject.toml")

    return file_vars_set


def set_version(args):
    """set-version command entry point."""

    version_str = strip_leading_v(args.version)
    if not VersionInfo.isvalid(version_str):
        message = f"Unable to validate version: {args.version}"
        raise RuntimeError(message)

    file_vars_set = load_file_vars_set(args.pyproject_file, args.file_vars)

    for file_var_str in sorted(file_vars_set):
        print(f"Processing {file_var_str}")
        file, var_name = file_var_str.split(":", 1)
        file_path = Path(file).resolve()

        if file_path.suffix == ".py":
            update_variable_in_py_file(file_path, var_name, version_str)
        elif file_path.suffix == ".toml":
            update_variable_in_toml_file(file_path, var_name, version_str)
        else:
            message = f"Unsupported file extension: {file_path.suffix}"
            raise RuntimeError(message)


def get_variable_from_py_file(file_path: Path, var_name: str):
    """Read a variable value from a .py file."""
    file_content = None
    with open(file_path, encoding="utf-8") as f:
        file_content = f.read()

    variable_values_set = set()

    start_pos = 0
    while True:
        file_content = file_content[start_pos:]
        match = re.search(
            rf'{var_name} *[:=] *["\'](.+)["\']',
            file_content,
        )
        if match is None:
            break

        variable_values_set.add(match.group(1))
        start_pos = match.end()

    return variable_values_set


def get_variable_from_toml_file(file_path: Path, var_name: str):
    """Read a variable value from a .toml file."""

    toml_content = None
    with open(file_path, encoding="utf-8") as f:
        toml_content = tomlkit.loads(f.read())

    toml_keys = var_name.split(".")
    current_content = toml_content
    for toml_key in toml_keys:
        current_content = current_content[toml_key]

    return current_content


def main(args):
    """Entry point"""
    args.entry_point(args)


if __name__ == "__main__":
    main_parser = argparse.ArgumentParser("Version utils", allow_abbrev=False)

    sub_parsers = main_parser.add_subparsers(dest="sub-command", required=True)

    parser_islatest = sub_parsers.add_parser("islatest")
    parser_islatest.add_argument(
        "--new-version", type=str, required=True, help="The new version to compare"
    )
    parser_islatest.add_argument(
        "--existing-versions",
        type=str,
        nargs="+",
        required=True,
        help="The list of existing versions",
    )
    parser_islatest.set_defaults(entry_point=islatest)

    parser_set_version = sub_parsers.add_parser("set-version")
    parser_set_version.add_argument("--version", type=str, required=True, help="The version to set")
    parser_set_version.add_argument(
        "--pyproject-file",
        type=str,
        default="pyproject.toml",
        help="The path to a project's pyproject.toml file, defaults to $pwd/pyproject.toml",
    )
    parser_set_version.add_argument(
        "--file-vars",
        type=str,
        nargs="+",
        help=(
            "A space separated list of file/path.{py, toml}:variable to update with the new version"
        ),
    )
    parser_set_version.set_defaults(entry_point=set_version)

    cli_args = main_parser.parse_args()

    main(cli_args)
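A sketch of both sub-commands (the `--file-vars` targets are illustrative, not taken from the diff):

```bash
python version_utils.py islatest --new-version v1.1.0 --existing-versions v1.0.0 v1.1.0-rc.1
# -> {"is_latest": true, "is_prerelease": false}
python version_utils.py set-version --version 1.1.0 \
    --file-vars "concrete/version.py:__version__" "pyproject.toml:tool.poetry.version"
```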
@@ -0,0 +1,55 @@
"""Finalizer for Jupyter notebooks."""
import argparse
import json
from pathlib import Path


def main():
    """Finalize"""

    parser = argparse.ArgumentParser(description="Sanitizer for Jupyter Notebooks")

    parser.add_argument("base", type=str, help="directory which contains the notebooks")
    parser.add_argument("--check", action="store_true", help="flag to enable just checking mode")

    args = parser.parse_args()

    base = Path(args.base)
    notebooks = base.glob("**/*.ipynb")

    for notebook in notebooks:
        path = str(notebook)
        if "_build" in path or ".ipynb_checkpoints" in path:
            continue

        with open(notebook, "r", encoding="utf-8") as f:
            content = json.load(f)

        if args.check:
            try:
                metadata = content["metadata"]
                assert len(metadata) == 1
                assert "execution" in metadata

                execution = metadata["execution"]
                assert len(execution) == 1
                assert "timeout" in execution

                timeout = execution["timeout"]
                assert timeout == 10800  # 3 hours
            except Exception:
                print("Notebooks are not sanitized. Please run `make conformance`.")
                raise
        else:
            content["metadata"] = {
                "execution": {
                    "timeout": 10800,  # 3 hours
                }
            }
            with open(notebook, "w", newline="\n", encoding="utf-8") as f:
                json.dump(content, f, indent=1, ensure_ascii=False)
                f.write("\n")


if __name__ == "__main__":
    main()
@@ -0,0 +1,46 @@
#!/bin/bash

# Run benchmarks while logging the intermediate results
# Publish findings in the progress tracker

set -e

if [ -f .env ]
then
    # shellcheck disable=SC1091
    set -a; source .env; set +a
fi

DEV_VENV_PATH="/home/dev_user/dev_venv"

# shellcheck disable=SC1090,SC1091
if ! source "${DEV_VENV_PATH}/bin/activate"; then
    python3 -m venv "${DEV_VENV_PATH}"
    # shellcheck disable=SC1090,SC1091
    source "${DEV_VENV_PATH}/bin/activate"
fi

cd /src/ && make setup_env

mkdir -p /tmp/keycache
mkdir -p logs

initial_concrete_log=logs/$(date -u --iso-8601=seconds).concrete.log
make -s benchmark 2>&1 | tee -a "$initial_concrete_log"

final_concrete_log=logs/$(date -u --iso-8601=seconds).concrete.log
cat -s "$initial_concrete_log" | sed '1d; $d' > "$final_concrete_log"

# sed above removes the first and the last lines of the log,
# which are empty to provide a nice console output;
# empty lines are useless in logs, so we get rid of them

rm "$initial_concrete_log"
cp "$final_concrete_log" logs/latest.concrete.log

curl \
    -H "Authorization: Bearer ${CONCRETE_PROGRESS_TRACKER_TOKEN}" \
    -H 'Content-Type: application/json' \
    -d @progress.json \
    -X POST "$CONCRETE_PROGRESS_TRACKER_URL"/measurement
48 frontends/concrete-python/script/source_format/format_python.sh Executable file
@@ -0,0 +1,48 @@
#!/bin/bash

function usage() {
    echo "$0: format python source files with isort and black"
    echo
    echo "--help                    Print this message"
    echo "--check                   Do not apply format"
    echo "--dir                     Specify a source directory"
    echo
}

CHECK=

while [ -n "$1" ]
do
    case $1 in
        "--help" | "-h" )
            usage
            exit 0
            ;;

        "--check" )
            CHECK="$1"
            ;;

        "--dir" )
            shift
            DIRS+=("$1")
            ;;

        *)
            echo "Unknown param : $1"
            exit 1
            ;;
    esac
    shift
done

for SRC_DIR in "${DIRS[@]}"; do
    isort -l 100 --profile black ${CHECK:+"$CHECK"} "${SRC_DIR}"
    ((FAILURES+=$?))
    black -l 100 ${CHECK:+"$CHECK"} "${SRC_DIR}"
    ((FAILURES+=$?))
done

if [[ "$FAILURES" != "0" ]]; then
    exit 1
fi
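A usage sketch:

```bash
./format_python.sh --dir src --dir tests          # apply isort and black in place
./format_python.sh --check --dir src --dir tests  # report diffs and exit non-zero
```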