mirror of
https://github.com/zama-ai/concrete.git
synced 2026-02-08 19:44:57 -05:00
chore: Move to the mono repo layout
This commit is contained in:
251
frontends/concrete-python/script/make_utils/changelog_helper.py
Normal file
251
frontends/concrete-python/script/make_utils/changelog_helper.py
Normal file
@@ -0,0 +1,251 @@
|
||||
"""Tool to bypass the insane logic of semantic-release and generate changelogs we want"""
|
||||
|
||||
import argparse
|
||||
import subprocess
|
||||
import sys
|
||||
from collections import deque
|
||||
|
||||
from git.repo import Repo
|
||||
from semantic_release.changelog import markdown_changelog
|
||||
from semantic_release.errors import UnknownCommitMessageStyleError
|
||||
from semantic_release.settings import config, current_commit_parser
|
||||
from semantic_release.vcs_helpers import get_repository_owner_and_name
|
||||
from semver import VersionInfo
|
||||
|
||||
|
||||
def log_msg(*args, file=sys.stderr, **kwargs):
    """Print a diagnostic message, writing to sys.stderr unless overridden.

    Args:
        *args: positional arguments forwarded to print().
        file: destination stream, defaults to sys.stderr.
        **kwargs: keyword arguments forwarded to print().
    """
    kwargs["file"] = file
    print(*args, **kwargs)
|
||||
|
||||
|
||||
def strip_leading_v(version_str: str):
    """Drop a single leading "v" from a version string (SemVer forbids the prefix)."""
    if version_str.startswith("v"):
        return version_str[1:]
    return version_str
|
||||
|
||||
|
||||
def get_poetry_project_version() -> VersionInfo:
    """Return the current project version as reported by `poetry version`.

    `poetry version` prints "<project-name> <version>"; the second
    whitespace-separated token is parsed into a VersionInfo.
    """
    output = subprocess.check_output(["poetry", "version"], text=True)
    version_token = output.split(" ")[1]
    return version_string_to_version_info(version_token)
|
||||
|
||||
|
||||
def raise_exception_or_print_warning(is_error: bool, message_body: str):
    """Raise a RuntimeError when is_error is set, otherwise log a warning to stderr.

    Args:
        is_error (bool): True to raise, False to only warn.
        message_body (str): the error/warning text (without the severity prefix).

    Raises:
        RuntimeError: when is_error is True, with message "Error: <message_body>".
    """
    if is_error:
        raise RuntimeError(f"Error: {message_body}")
    log_msg(f"Warning: {message_body}")
|
||||
|
||||
|
||||
def version_string_to_version_info(version_string: str) -> VersionInfo:
    """Parse a (possibly "v"-prefixed, git-tag style) version string into a VersionInfo."""
    normalized = strip_leading_v(version_string)
    return VersionInfo.parse(normalized)
|
||||
|
||||
|
||||
def generate_changelog(repo: Repo, from_commit_excluded: str, to_commit_included: str) -> dict:
    """Recreate the functionality from semantic release with the from and to commits.

    Args:
        repo (Repo): the gitpython Repo object representing your git repository
        from_commit_excluded (str): the commit after which we want to collect commit messages for
            the changelog
        to_commit_included (str): the last commit included in the collected commit messages for the
            changelog.

    Returns:
        dict: the same formatted dict as the generate_changelog from semantic-release,
        mapping a commit type (e.g. "feat", "fix", "breaking") to a list of
        (sha1, formatted message) tuples.
    """
    # Additional sections will be added as new types are encountered
    changes: dict = {"breaking": []}

    # Triple-dot range: commits reachable from either side but not both.
    rev = f"{from_commit_excluded}...{to_commit_included}"

    for commit in repo.iter_commits(rev):
        hash_ = commit.hexsha
        # commit.message may be str or bytes depending on gitpython/encoding;
        # normalize CRLF in either case.
        commit_message = (
            commit.message.replace("\r\n", "\n")
            if isinstance(commit.message, str)
            else commit.message.replace(b"\r\n", b"\n")
        )
        try:
            # Parse with the commit style configured for semantic-release.
            message = current_commit_parser()(commit_message)
            if message.type not in changes:
                log_msg(f"Creating new changelog section for {message.type} ")
                changes[message.type] = []

            # Capitalize the first letter of the message, leaving others as they were
            # (using str.capitalize() would make the other letters lowercase)
            formatted_message = message.descriptions[0][0].upper() + message.descriptions[0][1:]
            if config.get("changelog_capitalize") is False:
                formatted_message = message.descriptions[0]

            # By default, feat(x): description shows up in changelog with the
            # scope bolded, like:
            #
            # * **x**: description
            if config.get("changelog_scope") and message.scope:
                formatted_message = f"**{message.scope}:** {formatted_message}"

            changes[message.type].append((hash_, formatted_message))

            if message.breaking_descriptions:
                # Copy breaking change descriptions into changelog
                for paragraph in message.breaking_descriptions:
                    changes["breaking"].append((hash_, paragraph))
            elif message.bump == 3:
                # Major, but no breaking descriptions, use commit subject instead
                changes["breaking"].append((hash_, message.descriptions[0]))

        # Commits that do not follow the configured style are skipped, not fatal.
        except UnknownCommitMessageStyleError as err:
            log_msg(f"Ignoring UnknownCommitMessageStyleError: {err}")

    return changes
|
||||
|
||||
|
||||
def main(args):
    """Entry point: compute the commit range and print the markdown changelog.

    Resolves the "to" commit from args.to_ref, picks a "from" commit (either
    args.from_ref or the highest release tag below the current version), walks
    back to their merge-base, then renders the changelog to stdout.
    """

    repo = Repo(args.repo_root)

    # Map commit sha1 -> tag so commits can be matched to tags cheaply.
    sha1_to_tags = {tag.commit.hexsha: tag for tag in repo.tags}

    to_commit = repo.commit(args.to_ref)
    log_msg(f"To commit: {to_commit}")

    to_tag = sha1_to_tags.get(to_commit.hexsha, None)
    if to_tag is None:
        raise_exception_or_print_warning(
            is_error=args.to_ref_must_have_tag,
            message_body=f"to-ref {args.to_ref} has no tag associated to it",
        )

    # Fall back to the poetry project version when to_commit is untagged.
    to_version = (
        get_poetry_project_version()
        if to_tag is None
        else version_string_to_version_info(to_tag.name)
    )
    log_msg(f"Project version {to_version} taken from tag: {to_tag is not None}")

    from_commit = None
    if args.from_ref is None:
        # No explicit from-ref: use the highest non-prerelease tag < to_version.
        tags_by_name = {strip_leading_v(tag.name): tag for tag in repo.tags}
        version_infos = {
            VersionInfo.parse(tag_name): tag_name
            for tag_name in tags_by_name
            if VersionInfo.isvalid(tag_name)
        }
        all_release_version_infos = {
            version_info: tags_by_name[tag_name]
            for version_info, tag_name in version_infos.items()
            if version_info.prerelease is None
        }
        log_msg(f"All release versions {all_release_version_infos}")

        versions_before_project_version = [
            version_info for version_info in all_release_version_infos if version_info < to_version
        ]
        if len(versions_before_project_version) > 0:
            highest_version_before_current_version = max(versions_before_project_version)
            highest_version_tag = all_release_version_infos[highest_version_before_current_version]
            from_commit = highest_version_tag.commit
        else:
            # No versions before, get the initial commit reachable from to_commit
            # from https://stackoverflow.com/a/48232574
            last_element_extractor = deque(repo.iter_commits(to_commit), 1)
            from_commit = last_element_extractor.pop()
    else:
        from_commit = repo.commit(args.from_ref)

    log_msg(f"From commit: {from_commit}")
    # merge_base returns a list; a single common ancestor is expected here.
    ancestor_commit = repo.merge_base(to_commit, from_commit)
    assert len(ancestor_commit) == 1
    ancestor_commit = ancestor_commit[0]
    log_msg(f"Common ancestor: {ancestor_commit}")

    if ancestor_commit != from_commit:
        # Only treat a diverged from-ref as fatal when it was explicitly given.
        do_not_change_from_ref = args.do_not_change_from_ref and args.from_ref is not None
        raise_exception_or_print_warning(
            is_error=do_not_change_from_ref,
            message_body=(
                f"the ancestor {ancestor_commit} for {from_commit} and {to_commit} "
                f"is not the same commit as the commit for '--from-ref' {from_commit}."
            ),
        )

    ancestor_tag = sha1_to_tags.get(ancestor_commit.hexsha, None)
    if ancestor_tag is None:
        raise_exception_or_print_warning(
            is_error=args.ancestor_must_have_tag,
            message_body=(
                f"the ancestor {ancestor_commit} for " f"{from_commit} and {to_commit} has no tag"
            ),
        )

    ancestor_version_str = (
        None if ancestor_tag is None else str(version_string_to_version_info(ancestor_tag.name))
    )

    log_msg(
        f"Collecting commits from \n{ancestor_commit} "
        f"(tag: {ancestor_tag} - parsed version "
        f"{str(ancestor_version_str)}) to \n{to_commit} "
        f"(tag: {to_tag} - parsed version {str(to_version)})"
    )

    log_dict = generate_changelog(repo, ancestor_commit.hexsha, to_commit.hexsha)

    # Render the collected commits with semantic-release's markdown formatter.
    owner, name = get_repository_owner_and_name()
    md_changelog = markdown_changelog(
        owner,
        name,
        str(to_version),
        log_dict,
        header=True,
        previous_version=ancestor_version_str,
    )

    print(md_changelog)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI wiring for the changelog helper; all heavy lifting happens in main().
    parser = argparse.ArgumentParser("Changelog helper", allow_abbrev=False)

    parser.add_argument("--repo-root", type=str, default=".", help="Path to the repo root")
    parser.add_argument(
        "--to-ref",
        type=str,
        help="Specify the git ref-like string (sha1, tag, HEAD~, etc.) that will mark the LAST "
        "included commit of the changelog. If this is not specified, the current project version "
        "will be used to create a changelog with the current commit as last commit.",
    )
    parser.add_argument(
        "--from-ref",
        type=str,
        # Fix: a space was missing between "ancestor" and "(git merge-base)",
        # which rendered as "ancestor(git merge-base)" in --help output.
        help="Specify the git ref-like string (sha1, tag, HEAD~, etc.) that will mark the commit "
        "BEFORE the first included commit of the changelog. If this is not specified, the most "
        "recent actual release tag (no pre-releases) before the '--to-ref' argument will be used. "
        "If the tagged commit is not an ancestor of '--to-ref' then the most recent common "
        "ancestor (git merge-base) will be used unless '--do-not-change-from-ref' is specified.",
    )
    parser.add_argument(
        "--ancestor-must-have-tag",
        action="store_true",
        help="Set if the used ancestor must have a tag associated to it.",
    )
    parser.add_argument(
        "--to-ref-must-have-tag",
        action="store_true",
        help="Set if '--to-ref' must have a tag associated to it.",
    )
    parser.add_argument(
        "--do-not-change-from-ref",
        action="store_true",
        help="Specify to prevent selecting a different '--from-ref' than the one specified in cli. "
        "Will raise an exception if '--from-ref' is not a suitable ancestor for '--to-ref' and "
        "would otherwise use the most recent common ancestor (git merge-base) as '--from-ref'.",
    )

    cli_args = parser.parse_args()
    main(cli_args)
|
||||
@@ -0,0 +1,30 @@
|
||||
"""File to get pylintrc notes"""
|
||||
|
||||
import argparse
|
||||
import configparser
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def main(args):
    """Print the pylint "notes" markers as a grep-ready alternation pattern.

    Reads the MISCELLANEOUS/notes entry of the given pylintrc (ini format),
    sorts the markers, always appends the TODO marker, and prints them joined
    with '\\|' so the result can be fed directly to grep.
    """
    pylintrc = configparser.ConfigParser()
    pylintrc.read(Path(args.pylintrc_path).resolve())

    markers = [marker.strip() for marker in pylintrc["MISCELLANEOUS"]["notes"].split(",")]
    markers.sort()
    # The marker is spelled in two halves so that a grep for it does not
    # match this very file.
    markers.append("TO" + "DO")

    print(r"\|".join(markers))
|
||||
|
||||
|
||||
# CLI: parse the pylintrc path and emit the grep pattern of its "notes" markers.
if __name__ == "__main__":
    parser = argparse.ArgumentParser("Parse pylintrc notes", allow_abbrev=False)

    parser.add_argument(
        "--pylintrc-path", type=str, required=True, help="Path to pylintrc ini config"
    )

    cli_args = parser.parse_args()

    main(cli_args)
|
||||
44
frontends/concrete-python/script/make_utils/is_latest.py
Normal file
44
frontends/concrete-python/script/make_utils/is_latest.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
Simple script to check if a given version is the latest version of Concrete Numpy.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from typing import List
|
||||
|
||||
import requests # type: ignore
|
||||
from semver import VersionInfo
|
||||
|
||||
|
||||
def is_latest(new_version: VersionInfo, existing_versions: List[VersionInfo]):
    """Tell whether `new_version` is the latest release among `existing_versions`.

    A pre-release is never considered latest, and pre-releases found in
    `existing_versions` are ignored for the comparison.
    """

    if new_version.prerelease:
        return False

    released_only = (version for version in existing_versions if not version.prerelease)
    return all(version <= new_version for version in released_only)
|
||||
|
||||
|
||||
def main():
    """Fetch published Concrete Numpy releases from GitHub and print whether the
    version given as first CLI argument is the latest one."""

    releases = requests.get(
        "https://api.github.com/repos/zama-ai/concrete-numpy/releases"
    ).json()

    candidate = VersionInfo.parse(sys.argv[1])
    # Release names are "vX.Y.Z": drop the leading "v" before parsing.
    published = [VersionInfo.parse(release["name"][1:]) for release in releases]

    print(is_latest(candidate, published))
|
||||
|
||||
|
||||
# Script entry point: expects the version to check as first CLI argument.
if __name__ == "__main__":
    main()
|
||||
28
frontends/concrete-python/script/make_utils/is_prerelease.py
Normal file
28
frontends/concrete-python/script/make_utils/is_prerelease.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""
|
||||
Simple script to check if a given version is a pre-release version.
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
from semver import VersionInfo
|
||||
|
||||
|
||||
def is_prerelease(version: VersionInfo):
    """Tell whether `version` carries a pre-release component."""

    has_prerelease_part = version.prerelease is not None
    return has_prerelease_part
|
||||
|
||||
|
||||
def main():
    """Parse the version given as first CLI argument and print "true"/"false"
    depending on whether it is a pre-release."""

    parsed = VersionInfo.parse(sys.argv[1])
    answer = str(is_prerelease(parsed)).lower()
    print(answer)
|
||||
|
||||
|
||||
# Script entry point: expects the version to check as first CLI argument.
if __name__ == "__main__":
    main()
|
||||
126
frontends/concrete-python/script/make_utils/licenses.sh
Executable file
126
frontends/concrete-python/script/make_utils/licenses.sh
Executable file
@@ -0,0 +1,126 @@
|
||||
#!/bin/bash

# Generate (or, with --check, verify) the per-OS dependency license files
# under docs/, using pip-licenses inside a throwaway virtualenv.

set -e

BASENAME="licenses"
LICENSE_DIRECTORY="docs"
CHECK=0
# Whitespace-insensitive diff used in --check mode.
# Fix: removed the duplicated --ignore-all-space flag.
DIFF_TOOL="diff --ignore-all-space --ignore-tab-expansion --ignore-space-change --ignore-blank-lines --strip-trailing-cr"
TMP_VENV_PATH="/tmp/tmp_venv"
DO_USER_LICENSES=1

# Dev licences are not done, but could be re-enabled
DO_DEV_LICENSES=0

OUTPUT_DIRECTORY="${LICENSE_DIRECTORY}"

while [ -n "$1" ]
do
   case "$1" in
        "--check" )
            CHECK=1
            OUTPUT_DIRECTORY=$(mktemp -d)
            ;;

        *)
            echo "Unknown param : $1"
            exit 1
            ;;
   esac
   shift
done

UNAME=$(uname)
if [ "$UNAME" == "Darwin" ]
then
    OS=mac
elif [ "$UNAME" == "Linux" ]
then
    OS=linux
else
    echo "Problem with OS"
    exit 255
fi

if [ $DO_USER_LICENSES -eq 1 ]
then
    #Licenses for user (install in a temporary venv)
    echo "Doing licenses for user"

    FILENAME="${OS}.dependency.${BASENAME}.txt"
    LICENSES_FILENAME="${LICENSE_DIRECTORY}/${FILENAME}"
    NEW_LICENSES_FILENAME="${OUTPUT_DIRECTORY}/${FILENAME}"

    rm -rf $TMP_VENV_PATH/tmp_venv
    python3 -m venv $TMP_VENV_PATH/tmp_venv

    # SC1090: Can't follow non-constant source. Use a directive to specify location.
    # shellcheck disable=SC1090,SC1091
    source $TMP_VENV_PATH/tmp_venv/bin/activate

    python -m pip install -U pip wheel
    python -m pip install -U --force-reinstall setuptools
    poetry install --only main
    python -m pip install pip-licenses
    pip-licenses | grep -v "pkg\-resources\|concrete-numpy" | tee "${NEW_LICENSES_FILENAME}"

    # Remove trailing whitespace.
    # Fix: the class was '[t ]', which strips trailing 't' letters and spaces
    # (not tabs); use POSIX [[:space:]] to match actual whitespace.
    if [ "$UNAME" == "Darwin" ]
    then
        sed -i "" 's/[[:space:]]*$//g' "${NEW_LICENSES_FILENAME}"
    else
        sed -i 's/[[:space:]]*$//g' "${NEW_LICENSES_FILENAME}"
    fi

    deactivate

    if [ $CHECK -eq 1 ]
    then
        echo "$DIFF_TOOL $LICENSES_FILENAME ${NEW_LICENSES_FILENAME}"
        $DIFF_TOOL "$LICENSES_FILENAME" "${NEW_LICENSES_FILENAME}"
        echo "Success: no update in $LICENSES_FILENAME"
    fi
fi

if [ $DO_DEV_LICENSES -eq 1 ]
then
    # Licenses for developer (install in a temporary venv)
    # Fix: "developper" typo in the progress message.
    echo "Doing licenses for developer"

    FILENAME="${BASENAME}_${OS}_dev.txt"
    LICENSES_FILENAME="${LICENSE_DIRECTORY}/${FILENAME}"
    NEW_LICENSES_FILENAME="${OUTPUT_DIRECTORY}/${FILENAME}"

    rm -rf $TMP_VENV_PATH/tmp_venv
    python3 -m venv $TMP_VENV_PATH/tmp_venv

    # SC1090: Can't follow non-constant source. Use a directive to specify location.
    # shellcheck disable=SC1090,SC1091
    source $TMP_VENV_PATH/tmp_venv/bin/activate

    make setup_env
    pip-licenses | grep -v "pkg\-resources\|concrete-numpy" | tee "${NEW_LICENSES_FILENAME}"

    # Remove trailing whitespace (same [[:space:]] fix as above).
    if [ "$UNAME" == "Darwin" ]
    then
        sed -i "" 's/[[:space:]]*$//g' "${NEW_LICENSES_FILENAME}"
    else
        sed -i 's/[[:space:]]*$//g' "${NEW_LICENSES_FILENAME}"
    fi

    deactivate

    if [ $CHECK -eq 1 ]
    then

        echo "$DIFF_TOOL $LICENSES_FILENAME ${NEW_LICENSES_FILENAME}"
        $DIFF_TOOL "$LICENSES_FILENAME" "${NEW_LICENSES_FILENAME}"
        echo "Success: no update in $LICENSES_FILENAME"
    fi
fi

rm -f ${LICENSE_DIRECTORY}/licenses_*.txt.tmp
rm -rf $TMP_VENV_PATH/tmp_venv

echo "End of license script"
|
||||
7
frontends/concrete-python/script/make_utils/ncpus.sh
Executable file
7
frontends/concrete-python/script/make_utils/ncpus.sh
Executable file
@@ -0,0 +1,7 @@
|
||||
#!/bin/bash

# Print the number of logical CPUs, portably across macOS and Linux.
case "$(uname)" in
    Darwin)
        sysctl -n hw.logicalcpu
        ;;
    *)
        nproc
        ;;
esac
|
||||
95
frontends/concrete-python/script/make_utils/setup_os_deps.sh
Executable file
95
frontends/concrete-python/script/make_utils/setup_os_deps.sh
Executable file
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env bash

# Install the OS-level build dependencies (apt on Linux, brew on macOS).
# --linux-install-python additionally installs the python3 toolchain on Linux.

# From https://stackoverflow.com/a/69860299
isDocker(){
    local cgroup=/proc/1/cgroup
    test -f $cgroup && [[ "$(<$cgroup)" = *:cpuset:/docker/* ]]
}

isDockerBuildkit(){
    local cgroup=/proc/1/cgroup
    test -f $cgroup && [[ "$(<$cgroup)" = *:cpuset:/docker/buildkit/* ]]
}

isDockerContainer(){
    [[ -e /.dockerenv ]]
}

LINUX_INSTALL_PYTHON=0

while [ -n "$1" ]
do
   case "$1" in
        "--linux-install-python" )
            LINUX_INSTALL_PYTHON=1
            ;;

        *)
            echo "Unknown param : $1"
            exit 1
            ;;
   esac
   shift
done

OS_NAME=$(uname)

if [[ "${OS_NAME}" == "Linux" ]]; then
    # Docker build: run as root, no sudo available/needed.
    if isDockerBuildkit || (isDocker && ! isDockerContainer); then
        CLEAR_APT_LISTS="rm -rf /var/lib/apt/lists/* &&"
        SUDO_BIN=""
    else
        CLEAR_APT_LISTS=""
        SUDO_BIN="$(command -v sudo)"
        if [[ "${SUDO_BIN}" != "" ]]; then
            SUDO_BIN="${SUDO_BIN} "
        fi
    fi

    PYTHON_PACKAGES=
    if [[ "${LINUX_INSTALL_PYTHON}" == "1" ]]; then
        PYTHON_PACKAGES="python3-pip \
        python3 \
        python3-dev \
        python3-tk \
        python3-venv \
        python-is-python3 \
        "
    fi

    # Fix: 'apt-get upgrade' was missing the sudo prefix and would fail for a
    # non-root user outside Docker (update and install already had it).
    # NOTE(review): the trailing pip installs intentionally run without sudo
    # (user-level install) — confirm this is the desired behavior.
    SETUP_CMD="${SUDO_BIN:+$SUDO_BIN}apt-get update && ${SUDO_BIN:+$SUDO_BIN}apt-get upgrade --no-install-recommends -y && \
    ${SUDO_BIN:+$SUDO_BIN}apt-get install --no-install-recommends -y \
    build-essential \
    curl \
    sqlite3 \
    ${PYTHON_PACKAGES:+$PYTHON_PACKAGES} \
    git \
    graphviz* \
    jq \
    make \
    pandoc \
    shellcheck && \
    ${CLEAR_APT_LISTS:+$CLEAR_APT_LISTS} \
    pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir poetry"
    eval "${SETUP_CMD}"
elif [[ "${OS_NAME}" == "Darwin" ]]; then
    # Some problems with the git which is preinstalled on AWS virtual machines. Let's unlink it
    # but not fail if it is not there, so use 'cat' as a hack to be sure that, even if set -x is
    # activated later in this script, the status is still 0 == success
    brew unlink git@2.35.1 | cat
    brew install git

    brew install curl graphviz jq make pandoc shellcheck sqlite
    python3 -m pip install -U pip
    python3 -m pip install poetry

    echo "Make is currently installed as gmake"
    echo 'If you need to use it as "make", you can add a "gnubin" directory to your PATH from your bashrc like:'
    # shellcheck disable=SC2016
    echo 'PATH="/usr/local/opt/make/libexec/gnubin:$PATH"'
else
    echo "Unknown OS"
    exit 1
fi
|
||||
@@ -0,0 +1,143 @@
|
||||
"""Helper script to be able to test python code in markdown files."""
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import sys
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
|
||||
# Markdown fence language hints that identify a Python code block.
PYTHON_BLOCK_HINTS = ["py", "python", "python3"]
# Opening fences such as "```python"; a tuple so str.startswith can take it directly.
BLOCK_STARTS = tuple(f"```{hint}" for hint in PYTHON_BLOCK_HINTS)
# Closing fence of any code block.
BLOCK_END = "```"
# pytest-codeblocks style directive comments, e.g. <!--pytest-codeblocks:skip-->
DIRECTIVE_COMMENT_PATTERN = "<!--pytest-codeblocks:(.*)-->"
# Directive: skip the next python block entirely.
SKIP_DIRECTIVE = "skip"
# Directive: the next python block continues the previous one.
CONT_DIRECTIVE = "cont"
|
||||
|
||||
|
||||
def get_code_blocks_for_file(md_file: Path) -> Dict[int, List[str]]:
    """Function to process an md file and test the python code in it.

    Args:
        md_file (Path): The path to the md file to convert and test.

    Raises:
        SyntaxError: If EOF is reached before a code block is closed.
        SyntaxError: If a block is not closed and a new python block is opened.

    Returns:
        Dict[int, List[str]]: A dict containing the code blocks of the file,
        keyed by the 1-based line number of the opening fence.
    """
    file_content = None

    python_code_blocks: Dict[int, List[str]] = {}

    # Register a fresh, empty block keyed by its opening line and return it.
    def get_code_block_container(line_idx):
        block_idx = line_idx
        python_code_blocks[block_idx] = []
        return python_code_blocks[block_idx]

    with open(md_file, encoding="utf-8") as f:
        file_content = f.readlines()

    # A single shared iterator: the inner while-loop below advances it past
    # the lines of the current block, so the outer for-loop never re-reads them.
    file_content_iterator = iter(enumerate(file_content, 1))
    python_block_continues = False
    skip_next_python_block = False

    for line_idx, line in file_content_iterator:
        if line.startswith(BLOCK_STARTS):
            if skip_next_python_block:
                skip_next_python_block = False
                continue
            # With a "cont" directive, keep appending to the previous container.
            if not python_block_continues:
                current_python_code = get_code_block_container(line_idx)
            while True:
                line_idx, line = next(file_content_iterator)
                if line == "":
                    # Reached EOF
                    message = (
                        "Reached EOF before finding the end of the current python block in "
                        f"{str(md_file)}"
                    )
                    raise SyntaxError(message)

                if line.strip() == BLOCK_END:
                    break

                if line.startswith(BLOCK_STARTS):
                    message = (
                        f"Error at line {line_idx} in file {str(md_file)}, "
                        "python block was opened before the previous one was "
                        "closed (missing ``` ?)"
                    )
                    raise SyntaxError(message)
                current_python_code.append(line)
            python_block_continues = False
        else:
            # Outside a code block: look for pytest-codeblocks directives.
            match = re.match(DIRECTIVE_COMMENT_PATTERN, line)
            if match is not None:
                directive = match.group(1)
                if directive == SKIP_DIRECTIVE:
                    skip_next_python_block = True
                elif directive == CONT_DIRECTIVE:
                    python_block_continues = True

            # "skip" wins over "cont" when both are pending.
            python_block_continues = python_block_continues and not skip_next_python_block

    return python_code_blocks
|
||||
|
||||
|
||||
def main(args):
    """The actual processing: collect python blocks from every md file under
    args.md_dir and exec each one, exiting non-zero if any fails."""
    md_dir_path = Path(args.md_dir)
    md_files = sorted(md_dir_path.glob("**/*.md"))

    # file path -> {opening line number -> list of code lines}
    code_blocks_per_file: Dict[str, Dict[int, List[str]]] = {}

    # Accumulated error report; printed at the end so all files are attempted.
    err_msg = ""

    for md_file in md_files:
        md_file = md_file.resolve().absolute()
        md_file_str = str(md_file)
        # pylint: disable=broad-except
        try:
            code_blocks_per_file[md_file_str] = get_code_blocks_for_file(md_file)
        except Exception:
            err_msg += f"Error while converting {md_file_str}"
            err_msg += traceback.format_exc() + "\n"
        # pylint: enable=broad-except

    for md_file_str, code_blocks in code_blocks_per_file.items():
        for line_idx, python_code in code_blocks.items():
            # exec is intentional here: the whole point is to run doc snippets.
            # pylint: disable=broad-except,exec-used
            try:
                print(f"Testing block starting line #{line_idx} from {md_file_str}")
                python_code = "".join(python_code)
                compiled_code = compile(python_code, filename=md_file_str, mode="exec")
                exec(compiled_code, {"__MODULE__": "__main__"})  # noqa: S102
                print("Success")
            except Exception:
                print("Failed")
                err_msg += (
                    f"Error while testing block starting line #{line_idx} from {md_file_str}:\n"
                )
                err_msg += f"```\n{python_code}```\n"
                err_msg += traceback.format_exc() + "\n"
            # pylint: enable=broad-except,exec-used

    if err_msg != "":
        print(err_msg)
        sys.exit(1)
|
||||
|
||||
|
||||
# CLI: run every python code block found in the markdown files of --md_dir.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        "Converts md python blocks to python files", allow_abbrev=False
    )
    parser.add_argument(
        "--md_dir", type=str, help="The path to the dir containing md files to convert."
    )

    cli_args = parser.parse_args()

    main(cli_args)
|
||||
20
frontends/concrete-python/script/make_utils/upgrade_deps.sh
Executable file
20
frontends/concrete-python/script/make_utils/upgrade_deps.sh
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/usr/bin/env bash

# Upgrade every outdated poetry dependency to its latest release while keeping
# the main/dev split: runtime deps are re-added with `poetry add`, dev-only
# deps with `poetry add --dev`.
# NOTE(review): `join` requires its inputs to be sorted; `poetry show -t`
# output appears sorted but this is not guaranteed — verify if results look off.
# NOTE(review): `poetry add --dev` is deprecated in newer poetry versions in
# favor of `--group dev` — confirm the poetry version in use.

# verbose output please
set -v

no_dev_file=$(mktemp --suffix=.txt)
all_file=$(mktemp --suffix=.txt)
dev_file=$(mktemp --suffix=.txt)

# "<package>@latest" lines for outdated runtime-only dependencies...
poetry show -o -t --only main | grep -v -e "--" | cut -d " " -f 1 | sed 's/$/\@latest/g' > "${no_dev_file}"
# ...and for all outdated dependencies.
poetry show -o -t | grep -v -e "--" | cut -d " " -f 1 | sed 's/$/\@latest/g' > "${all_file}"
# Lines unique to either file, i.e. the dev-only dependencies.
join -v1 -v2 "${all_file}" "${no_dev_file}" > "${dev_file}"
# shellcheck disable=SC2002
cat "${no_dev_file}" | xargs poetry add
# shellcheck disable=SC2002
cat "${dev_file}" | xargs poetry add --dev

rm "${no_dev_file}"
rm "${dev_file}"
rm "${all_file}"
|
||||
222
frontends/concrete-python/script/make_utils/version_utils.py
Normal file
222
frontends/concrete-python/script/make_utils/version_utils.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""Tool to manage version in the project"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
import tomlkit
|
||||
from semver import VersionInfo
|
||||
|
||||
|
||||
def strip_leading_v(version_str: str):
    """Strip a single leading "v" (git-tag style) so the string is SemVer parseable."""
    prefix_len = 1 if version_str.startswith("v") else 0
    return version_str[prefix_len:]
|
||||
|
||||
|
||||
def islatest(args):
    """islatest command entry point.

    Prints a JSON object {"is_latest": bool, "is_prerelease": bool} to stdout
    telling whether args.new_version is the highest non-prerelease version
    among args.existing_versions.
    """
    print(args, file=sys.stderr)

    # This is the safest default
    result = {"is_latest": False, "is_prerelease": True}

    new_version_str = strip_leading_v(args.new_version)
    if VersionInfo.isvalid(new_version_str):
        new_version_info = VersionInfo.parse(new_version_str)
        if new_version_info.prerelease is None:
            # If it's an actual release
            all_versions_str = (
                strip_leading_v(version_str) for version_str in args.existing_versions
            )

            # Keep versions that are not release candidate
            # (invalid version strings are silently dropped).
            version_infos = [
                VersionInfo.parse(version_str)
                for version_str in all_versions_str
                if VersionInfo.isvalid(version_str)
            ]
            all_non_prerelease_version_infos = [
                version_info for version_info in version_infos if version_info.prerelease is None
            ]

            all_non_prerelease_version_infos.append(new_version_info)

            # Latest iff no existing release is strictly greater.
            new_version_is_latest = max(all_non_prerelease_version_infos) == new_version_info
            result["is_latest"] = new_version_is_latest
            result["is_prerelease"] = False

    # Machine-readable result on stdout (diagnostics went to stderr above).
    print(json.dumps(result))
|
||||
|
||||
|
||||
def update_variable_in_py_file(file_path: Path, var_name: str, version_str: str):
    """Rewrite every `var_name = "..."` (or `var_name: "..."`) occurrence in a
    .py file so it carries `version_str`.

    The file is rewritten in place with "\\n" line endings.
    """
    with open(file_path, encoding="utf-8") as source:
        original_content = source.read()

    pattern = rf'{var_name} *[:=] *["\'](.+)["\']'
    replacement = rf'{var_name} = "{version_str}"'
    new_content = re.sub(pattern, replacement, original_content)

    with open(file_path, "w", encoding="utf-8", newline="\n") as destination:
        destination.write(new_content)
|
||||
|
||||
|
||||
def update_variable_in_toml_file(file_path: Path, var_name: str, version_str: str):
    """Set the dotted key `var_name` (e.g. "tool.poetry.version") to
    `version_str` in a .toml file, preserving formatting via tomlkit."""
    with open(file_path, encoding="utf-8") as source:
        document = tomlkit.loads(source.read())

    *parent_keys, final_key = var_name.split(".")
    node = document
    for key in parent_keys:
        node = node[key]
    node[final_key] = version_str

    with open(file_path, "w", encoding="utf-8", newline="\n") as destination:
        destination.write(tomlkit.dumps(document))
|
||||
|
||||
|
||||
def load_file_vars_set(pyproject_path: os.PathLike, cli_file_vars: Optional[List[str]]):
    """Load files and their version variables set-up in pyproject.toml and passed as arguments.

    Collects the comma separated entries of tool.semantic_release.version_toml
    and tool.semantic_release.version_variable (when pyproject.toml exists and
    has that section), together with any specs passed on the command line.

    Args:
        pyproject_path (os.PathLike): path to the project's pyproject.toml.
        cli_file_vars (Optional[List[str]]): extra "path:variable" specs from the CLI.

    Returns:
        set: the unique "path:variable" specs to process.
    """

    file_vars_set = set()
    if cli_file_vars is not None:
        file_vars_set.update(cli_file_vars)

    pyproject_path = Path(pyproject_path).resolve()

    # Check if there is a semantic release configuration
    if pyproject_path.exists():
        pyproject_content = None
        with open(pyproject_path, encoding="utf-8") as f:
            pyproject_content = tomlkit.loads(f.read())

        try:
            sr_conf = pyproject_content["tool"]["semantic_release"]
            # Fix: filter out empty entries — "".split(",") == [""] used to
            # inject an empty spec that later crashes the "file:variable"
            # split in set_version.
            sr_version_toml: str = sr_conf.get("version_toml", "")
            file_vars_set.update(entry for entry in sr_version_toml.split(",") if entry)
            sr_version_variable: str = sr_conf.get("version_variable", "")
            file_vars_set.update(entry for entry in sr_version_variable.split(",") if entry)
        except KeyError:
            print("No configuration for semantic release in pyproject.toml")

    return file_vars_set
|
||||
|
||||
|
||||
def set_version(args):
    """set-version command entry point.

    Validates args.version as SemVer, then writes it into every
    "path:variable" spec collected from pyproject.toml and --file-vars.

    Raises:
        RuntimeError: if the version is invalid or a target file has an
            unsupported extension (only .py and .toml are handled).
    """
    version_str = strip_leading_v(args.version)
    if not VersionInfo.isvalid(version_str):
        message = f"Unable to validate version: {args.version}"
        raise RuntimeError(message)

    file_vars_set = load_file_vars_set(args.pyproject_file, args.file_vars)

    # Sorted for deterministic processing order / output.
    for file_var_str in sorted(file_vars_set):
        print(f"Processing {file_var_str}")
        # Split on the first ":" only, so Windows-style paths keep working.
        file, var_name = file_var_str.split(":", 1)
        file_path = Path(file).resolve()

        if file_path.suffix == ".py":
            update_variable_in_py_file(file_path, var_name, version_str)
        elif file_path.suffix == ".toml":
            update_variable_in_toml_file(file_path, var_name, version_str)
        else:
            message = f"Unsupported file extension: {file_path.suffix}"
            raise RuntimeError(message)
|
||||
|
||||
|
||||
def get_variable_from_py_file(file_path: Path, var_name: str):
    """Collect every quoted value assigned to `var_name` in a .py file.

    Matches both `var = "..."` and `var: "..."` forms, with either quote style.

    Returns:
        set: all distinct string values found for the variable.
    """
    with open(file_path, encoding="utf-8") as source:
        remaining_content = source.read()

    pattern = rf'{var_name} *[:=] *["\'](.+)["\']'
    found_values = set()

    while True:
        match = re.search(pattern, remaining_content)
        if match is None:
            break
        found_values.add(match.group(1))
        # Continue scanning after the current match.
        remaining_content = remaining_content[match.end():]

    return found_values
|
||||
|
||||
|
||||
def get_variable_from_toml_file(file_path: Path, var_name: str):
    """Read the value stored at dotted key `var_name` in a .toml file."""

    with open(file_path, encoding="utf-8") as source:
        document = tomlkit.loads(source.read())

    value = document
    for key in var_name.split("."):
        value = value[key]

    return value
|
||||
|
||||
|
||||
def main(args):
    """Entry point: forward to the sub-command handler argparse attached via set_defaults."""
    handler = args.entry_point
    handler(args)
|
||||
|
||||
|
||||
# CLI: two sub-commands, "islatest" and "set-version"; each installs its
# handler via set_defaults(entry_point=...) which main() then dispatches to.
if __name__ == "__main__":
    main_parser = argparse.ArgumentParser("Version utils", allow_abbrev=False)

    sub_parsers = main_parser.add_subparsers(dest="sub-command", required=True)

    parser_islatest = sub_parsers.add_parser("islatest")
    parser_islatest.add_argument(
        "--new-version", type=str, required=True, help="The new version to compare"
    )
    parser_islatest.add_argument(
        "--existing-versions",
        type=str,
        nargs="+",
        required=True,
        help="The list of existing versions",
    )
    parser_islatest.set_defaults(entry_point=islatest)

    parser_set_version = sub_parsers.add_parser("set-version")
    parser_set_version.add_argument("--version", type=str, required=True, help="The version to set")
    parser_set_version.add_argument(
        "--pyproject-file",
        type=str,
        default="pyproject.toml",
        help="The path to a project's pyproject.toml file, defaults to $pwd/pyproject.toml",
    )
    parser_set_version.add_argument(
        "--file-vars",
        type=str,
        nargs="+",
        help=(
            "A space separated list of file/path.{py, toml}:variable to update with the new version"
        ),
    )
    parser_set_version.set_defaults(entry_point=set_version)

    cli_args = main_parser.parse_args()

    main(cli_args)
|
||||
Reference in New Issue
Block a user