refactor: rename [jit|library]lambdasupport to [jit|library]support

This commit is contained in:
youben11
2022-03-31 20:17:10 +01:00
committed by Ayoub Benaissa
parent 583d5edf00
commit 78def04fe5
19 changed files with 117 additions and 134 deletions

View File

@@ -7,9 +7,9 @@
#define CONCRETELANG_C_SUPPORT_COMPILER_ENGINE_H
#include "concretelang/Support/CompilerEngine.h"
#include "concretelang/Support/JITSupport.h"
#include "concretelang/Support/Jit.h"
#include "concretelang/Support/JitLambdaSupport.h"
#include "concretelang/Support/LibraryLambdaSupport.h"
#include "concretelang/Support/LibrarySupport.h"
#include "mlir-c/IR.h"
#include "mlir-c/Registration.h"
@@ -32,55 +32,52 @@ typedef struct executionArguments executionArguments;
// JIT Support bindings ///////////////////////////////////////////////////////
struct JITLambdaSupport_C {
mlir::concretelang::JitLambdaSupport support;
struct JITSupport_C {
mlir::concretelang::JITSupport support;
};
typedef struct JITLambdaSupport_C JITLambdaSupport_C;
typedef struct JITSupport_C JITSupport_C;
MLIR_CAPI_EXPORTED JITLambdaSupport_C
jit_lambda_support(std::string runtimeLibPath);
MLIR_CAPI_EXPORTED JITSupport_C jit_support(std::string runtimeLibPath);
MLIR_CAPI_EXPORTED std::unique_ptr<mlir::concretelang::JitCompilationResult>
jit_compile(JITLambdaSupport_C support, const char *module,
jit_compile(JITSupport_C support, const char *module,
mlir::concretelang::CompilationOptions options);
MLIR_CAPI_EXPORTED mlir::concretelang::ClientParameters
jit_load_client_parameters(JITLambdaSupport_C support,
jit_load_client_parameters(JITSupport_C support,
mlir::concretelang::JitCompilationResult &);
MLIR_CAPI_EXPORTED std::shared_ptr<mlir::concretelang::JITLambda>
jit_load_server_lambda(JITLambdaSupport_C support,
jit_load_server_lambda(JITSupport_C support,
mlir::concretelang::JitCompilationResult &);
MLIR_CAPI_EXPORTED std::unique_ptr<concretelang::clientlib::PublicResult>
jit_server_call(JITLambdaSupport_C support,
mlir::concretelang::JITLambda &lambda,
jit_server_call(JITSupport_C support, mlir::concretelang::JITLambda &lambda,
concretelang::clientlib::PublicArguments &args);
// Library Support bindings ///////////////////////////////////////////////////
struct LibraryLambdaSupport_C {
mlir::concretelang::LibraryLambdaSupport support;
struct LibrarySupport_C {
mlir::concretelang::LibrarySupport support;
};
typedef struct LibraryLambdaSupport_C LibraryLambdaSupport_C;
typedef struct LibrarySupport_C LibrarySupport_C;
MLIR_CAPI_EXPORTED LibraryLambdaSupport_C
library_lambda_support(const char *outputPath);
MLIR_CAPI_EXPORTED LibrarySupport_C library_support(const char *outputPath);
MLIR_CAPI_EXPORTED std::unique_ptr<mlir::concretelang::LibraryCompilationResult>
library_compile(LibraryLambdaSupport_C support, const char *module,
library_compile(LibrarySupport_C support, const char *module,
mlir::concretelang::CompilationOptions options);
MLIR_CAPI_EXPORTED mlir::concretelang::ClientParameters
library_load_client_parameters(LibraryLambdaSupport_C support,
library_load_client_parameters(LibrarySupport_C support,
mlir::concretelang::LibraryCompilationResult &);
MLIR_CAPI_EXPORTED concretelang::serverlib::ServerLambda
library_load_server_lambda(LibraryLambdaSupport_C support,
library_load_server_lambda(LibrarySupport_C support,
mlir::concretelang::LibraryCompilationResult &);
MLIR_CAPI_EXPORTED std::unique_ptr<concretelang::clientlib::PublicResult>
library_server_call(LibraryLambdaSupport_C support,
library_server_call(LibrarySupport_C support,
concretelang::serverlib::ServerLambda lambda,
concretelang::clientlib::PublicArguments &args);

View File

@@ -3,8 +3,8 @@
// https://github.com/zama-ai/concrete-compiler-internal/blob/master/LICENSE.txt
// for license information.
#ifndef CONCRETELANG_SUPPORT_JITLAMBDA_SUPPORT
#define CONCRETELANG_SUPPORT_JITLAMBDA_SUPPORT
#ifndef CONCRETELANG_SUPPORT_JIT_SUPPORT
#define CONCRETELANG_SUPPORT_JIT_SUPPORT
#include <mlir/Dialect/LLVMIR/LLVMDialect.h>
#include <mlir/ExecutionEngine/ExecutionEngine.h>
@@ -27,13 +27,13 @@ struct JitCompilationResult {
clientlib::ClientParameters clientParameters;
};
/// JitLambdaSupport is the instantiated LambdaSupport for the Jit Compilation.
class JitLambdaSupport
/// JITSupport is the instantiated LambdaSupport for the Jit Compilation.
class JITSupport
: public LambdaSupport<std::shared_ptr<concretelang::JITLambda>,
JitCompilationResult> {
public:
JitLambdaSupport(llvm::Optional<std::string> runtimeLibPath = llvm::None);
JITSupport(llvm::Optional<std::string> runtimeLibPath = llvm::None);
llvm::Expected<std::unique_ptr<JitCompilationResult>>
compile(llvm::SourceMgr &program, CompilationOptions options) override;

View File

@@ -3,8 +3,8 @@
// https://github.com/zama-ai/concrete-compiler-internal/blob/master/LICENSE.txt
// for license information.
#ifndef CONCRETELANG_SUPPORT_LIBRARY_LAMBDA_SUPPORT
#define CONCRETELANG_SUPPORT_LIBRARY_LAMBDA_SUPPORT
#ifndef CONCRETELANG_SUPPORT_LIBRARY_SUPPORT
#define CONCRETELANG_SUPPORT_LIBRARY_SUPPORT
#include <mlir/Dialect/LLVMIR/LLVMDialect.h>
#include <mlir/ExecutionEngine/ExecutionEngine.h>
@@ -29,11 +29,11 @@ struct LibraryCompilationResult {
std::string funcName;
};
class LibraryLambdaSupport
class LibrarySupport
: public LambdaSupport<serverlib::ServerLambda, LibraryCompilationResult> {
public:
LibraryLambdaSupport(std::string outputPath) : outputPath(outputPath) {}
LibrarySupport(std::string outputPath) : outputPath(outputPath) {}
llvm::Expected<std::unique_ptr<LibraryCompilationResult>>
compile(llvm::SourceMgr &program, CompilationOptions options) override {

View File

@@ -31,13 +31,13 @@ declare_mlir_python_sources(ConcretelangBindingsPythonSources
concrete/compiler/client_support.py
concrete/compiler/compilation_options.py
concrete/compiler/jit_compilation_result.py
concrete/compiler/jit_lambda_support.py
concrete/compiler/jit_support.py
concrete/compiler/jit_lambda.py
concrete/compiler/key_set_cache.py
concrete/compiler/key_set.py
concrete/compiler/lambda_argument.py
concrete/compiler/library_compilation_result.py
concrete/compiler/library_lambda_support.py
concrete/compiler/library_support.py
concrete/compiler/library_lambda.py
concrete/compiler/public_arguments.py
concrete/compiler/public_result.py

View File

@@ -6,8 +6,8 @@
#include "CompilerAPIModule.h"
#include "concretelang-c/Support/CompilerEngine.h"
#include "concretelang/Dialect/FHE/IR/FHEOpsDialect.h.inc"
#include "concretelang/Support/JITSupport.h"
#include "concretelang/Support/Jit.h"
#include "concretelang/Support/JitLambdaSupport.h"
#include <mlir/Dialect/MemRef/IR/MemRef.h>
#include <mlir/Dialect/StandardOps/IR/Ops.h>
#include <mlir/ExecutionEngine/OptUtils.h>
@@ -20,7 +20,7 @@
#include <string>
using mlir::concretelang::CompilationOptions;
using mlir::concretelang::JitLambdaSupport;
using mlir::concretelang::JITSupport;
using mlir::concretelang::LambdaArgument;
/// Populate the compiler API python module.
@@ -57,29 +57,29 @@ void mlir::concretelang::python::populateCompilerAPISubmodule(
pybind11::class_<mlir::concretelang::JITLambda,
std::shared_ptr<mlir::concretelang::JITLambda>>(m,
"JITLambda");
pybind11::class_<JITLambdaSupport_C>(m, "JITLambdaSupport")
pybind11::class_<JITSupport_C>(m, "JITSupport")
.def(pybind11::init([](std::string runtimeLibPath) {
return jit_lambda_support(runtimeLibPath);
return jit_support(runtimeLibPath);
}))
.def("compile",
[](JITLambdaSupport_C &support, std::string mlir_program,
[](JITSupport_C &support, std::string mlir_program,
CompilationOptions options) {
return jit_compile(support, mlir_program.c_str(), options);
})
.def("load_client_parameters",
[](JITLambdaSupport_C &support,
[](JITSupport_C &support,
mlir::concretelang::JitCompilationResult &result) {
return jit_load_client_parameters(support, result);
})
.def(
"load_server_lambda",
[](JITLambdaSupport_C &support,
[](JITSupport_C &support,
mlir::concretelang::JitCompilationResult &result) {
return jit_load_server_lambda(support, result);
},
pybind11::return_value_policy::reference)
.def("server_call",
[](JITLambdaSupport_C &support, concretelang::JITLambda &lambda,
[](JITSupport_C &support, concretelang::JITLambda &lambda,
clientlib::PublicArguments &publicArguments) {
return jit_server_call(support, lambda, publicArguments);
});
@@ -93,29 +93,29 @@ void mlir::concretelang::python::populateCompilerAPISubmodule(
};
}));
pybind11::class_<concretelang::serverlib::ServerLambda>(m, "LibraryLambda");
pybind11::class_<LibraryLambdaSupport_C>(m, "LibraryLambdaSupport")
pybind11::class_<LibrarySupport_C>(m, "LibrarySupport")
.def(pybind11::init([](std::string outputPath) {
return library_lambda_support(outputPath.c_str());
return library_support(outputPath.c_str());
}))
.def("compile",
[](LibraryLambdaSupport_C &support, std::string mlir_program,
[](LibrarySupport_C &support, std::string mlir_program,
mlir::concretelang::CompilationOptions options) {
return library_compile(support, mlir_program.c_str(), options);
})
.def("load_client_parameters",
[](LibraryLambdaSupport_C &support,
[](LibrarySupport_C &support,
mlir::concretelang::LibraryCompilationResult &result) {
return library_load_client_parameters(support, result);
})
.def(
"load_server_lambda",
[](LibraryLambdaSupport_C &support,
[](LibrarySupport_C &support,
mlir::concretelang::LibraryCompilationResult &result) {
return library_load_server_lambda(support, result);
},
pybind11::return_value_policy::reference)
.def("server_call",
[](LibraryLambdaSupport_C &support, serverlib::ServerLambda lambda,
[](LibrarySupport_C &support, serverlib::ServerLambda lambda,
clientlib::PublicArguments &publicArguments) {
return library_server_call(support, lambda, publicArguments);
});

View File

@@ -21,8 +21,8 @@ from .public_arguments import PublicArguments
from .jit_compilation_result import JITCompilationResult
from .jit_lambda import JITLambda
from .client_support import ClientSupport
from .jit_lambda_support import JITLambdaSupport
from .library_lambda_support import LibraryLambdaSupport
from .jit_support import JITSupport
from .library_support import LibrarySupport
# Terminate parallelization in the compiler (if init) during cleanup

View File

@@ -17,7 +17,7 @@ from .wrapper import WrapperCpp
class JITCompilationResult(WrapperCpp):
"""JITCompilationResult holds the result of a JIT compilation.
It can be instrumented using the JITLambdaSupport to load client parameters and execute the compiled
It can be instrumented using the JITSupport to load client parameters and execute the compiled
code.
"""

View File

@@ -13,10 +13,10 @@ from .wrapper import WrapperCpp
class JITLambda(WrapperCpp):
"""JITLambda contains in-memory executable code and can be run using JITLambdaSupport.
"""JITLambda contains in-memory executable code and can be run using JITSupport.
It's an artifact of JIT compilation, which stays in memory and can be executed with the help of
JITLambdaSupport.
JITSupport.
"""
def __init__(self, jit_lambda: _JITLambda):

View File

@@ -1,7 +1,7 @@
# Part of the Concrete Compiler Project, under the BSD3 License with Zama Exceptions.
# See https://github.com/zama-ai/concrete-compiler-internal/blob/master/LICENSE.txt for license information.
"""JITLambdaSupport.
"""JITSupport.
Just-in-time compilation provides a way to compile and execute an MLIR program while keeping the executable
code in memory.
@@ -11,7 +11,7 @@ from typing import Optional
# pylint: disable=no-name-in-module,import-error
from mlir._mlir_libs._concretelang._compiler import (
JITLambdaSupport as _JITLambdaSupport,
JITSupport as _JITSupport,
)
# pylint: enable=no-name-in-module,import-error
@@ -25,28 +25,28 @@ from .public_result import PublicResult
from .wrapper import WrapperCpp
class JITLambdaSupport(WrapperCpp):
class JITSupport(WrapperCpp):
"""Support class for JIT compilation and execution."""
def __init__(self, jit_lambda_support: _JITLambdaSupport):
def __init__(self, jit_support: _JITSupport):
"""Wrap the native Cpp object.
Args:
jit_lambda_support (_JITLambdaSupport): object to wrap
jit_support (_JITSupport): object to wrap
Raises:
TypeError: if jit_lambda_support is not of type _JITLambdaSupport
TypeError: if jit_support is not of type _JITSupport
"""
if not isinstance(jit_lambda_support, _JITLambdaSupport):
if not isinstance(jit_support, _JITSupport):
raise TypeError(
f"jit_lambda_support must be of type _JITLambdaSupport not{type(jit_lambda_support)}"
f"jit_support must be of type _JITSupport not{type(jit_support)}"
)
super().__init__(jit_lambda_support)
super().__init__(jit_support)
@staticmethod
# pylint: disable=arguments-differ
def new(runtime_lib_path: Optional[str] = None) -> "JITLambdaSupport":
"""Build a JITLambdaSupport.
def new(runtime_lib_path: Optional[str] = None) -> "JITSupport":
"""Build a JITSupport.
Args:
runtime_lib_path (Optional[str]): path to the runtime library. Defaults to None.
@@ -55,7 +55,7 @@ class JITLambdaSupport(WrapperCpp):
TypeError: if runtime_lib_path is not of type str or None
Returns:
JITLambdaSupport
JITSupport
"""
if runtime_lib_path is None:
runtime_lib_path = lookup_runtime_lib()
@@ -64,7 +64,7 @@ class JITLambdaSupport(WrapperCpp):
raise TypeError(
f"runtime_lib_path must be of type str, not {type(runtime_lib_path)}"
)
return JITLambdaSupport.wrap(_JITLambdaSupport(runtime_lib_path))
return JITSupport.wrap(_JITSupport(runtime_lib_path))
# pylint: enable=arguments-differ

View File

@@ -13,7 +13,7 @@ from .wrapper import WrapperCpp
class LibraryLambda(WrapperCpp):
"""LibraryLambda references a compiled library and can be run using LibraryLambdaSupport."""
"""LibraryLambda references a compiled library and can be run using LibrarySupport."""
def __init__(self, library_lambda: _LibraryLambda):
"""Wrap the native Cpp object.

View File

@@ -1,16 +1,16 @@
# Part of the Concrete Compiler Project, under the BSD3 License with Zama Exceptions.
# See https://github.com/zama-ai/concrete-compiler-internal/blob/master/LICENSE.txt for license information.
"""LibraryCompilerSupport.
"""LibrarySupport.
Library compilation provides a way to compile an MLIR program into a library that can be later loaded
Library support provides a way to compile an MLIR program into a library that can be later loaded
to execute the compiled code.
"""
import os
# pylint: disable=no-name-in-module,import-error
from mlir._mlir_libs._concretelang._compiler import (
LibraryLambdaSupport as _LibraryLambdaSupport,
LibrarySupport as _LibrarySupport,
)
# pylint: enable=no-name-in-module,import-error
@@ -29,24 +29,24 @@ DEFAULT_OUTPUT_PATH = os.path.abspath(
)
class LibraryLambdaSupport(WrapperCpp):
class LibrarySupport(WrapperCpp):
"""Support class for library compilation and execution."""
def __init__(self, library_lambda_support: _LibraryLambdaSupport):
def __init__(self, library_support: _LibrarySupport):
"""Wrap the native Cpp object.
Args:
library_lambda_support (_LibraryLambdaSupport): object to wrap
library_support (_LibrarySupport): object to wrap
Raises:
TypeError: if library_lambda_support is not of type _LibraryLambdaSupport
TypeError: if library_support is not of type _LibrarySupport
"""
if not isinstance(library_lambda_support, _LibraryLambdaSupport):
if not isinstance(library_support, _LibrarySupport):
raise TypeError(
f"library_lambda_support must be of type _LibraryLambdaSupport, not "
f"{type(library_lambda_support)}"
f"library_support must be of type _LibrarySupport, not "
f"{type(library_support)}"
)
super().__init__(library_lambda_support)
super().__init__(library_support)
self.library_path = DEFAULT_OUTPUT_PATH
@property
@@ -62,8 +62,8 @@ class LibraryLambdaSupport(WrapperCpp):
@staticmethod
# pylint: disable=arguments-differ
def new(output_path: str = DEFAULT_OUTPUT_PATH) -> "LibraryLambdaSupport":
"""Build a LibraryLambdaSupport.
def new(output_path: str = DEFAULT_OUTPUT_PATH) -> "LibrarySupport":
"""Build a LibrarySupport.
Args:
output_path (str, optional): path where to store compiled libraries.
@@ -73,15 +73,13 @@ class LibraryLambdaSupport(WrapperCpp):
TypeError: if output_path is not of type str
Returns:
LibraryLambdaSupport
LibrarySupport
"""
if not isinstance(output_path, str):
raise TypeError(f"output_path must be of type str, not {type(output_path)}")
library_lambda_support = LibraryLambdaSupport.wrap(
_LibraryLambdaSupport(output_path)
)
library_lambda_support.library_path = output_path
return library_lambda_support
library_support = LibrarySupport.wrap(_LibrarySupport(output_path))
library_support.library_path = output_path
return library_support
def compile(
self,

View File

@@ -9,8 +9,8 @@
#include "concretelang/ClientLib/KeySetCache.h"
#include "concretelang/Runtime/runtime_api.h"
#include "concretelang/Support/CompilerEngine.h"
#include "concretelang/Support/JITSupport.h"
#include "concretelang/Support/Jit.h"
#include "concretelang/Support/JitLambdaSupport.h"
#define GET_OR_THROW_LLVM_EXPECTED(VARNAME, EXPECTED) \
auto VARNAME = EXPECTED; \
@@ -20,16 +20,15 @@
// JIT Support bindings ///////////////////////////////////////////////////////
MLIR_CAPI_EXPORTED JITLambdaSupport_C
jit_lambda_support(std::string runtimeLibPath) {
MLIR_CAPI_EXPORTED JITSupport_C jit_support(std::string runtimeLibPath) {
auto opt = runtimeLibPath.empty()
? llvm::None
: llvm::Optional<std::string>(runtimeLibPath);
return JITLambdaSupport_C{mlir::concretelang::JitLambdaSupport(opt)};
return JITSupport_C{mlir::concretelang::JITSupport(opt)};
}
std::unique_ptr<mlir::concretelang::JitCompilationResult>
jit_compile(JITLambdaSupport_C support, const char *module,
jit_compile(JITSupport_C support, const char *module,
mlir::concretelang::CompilationOptions options) {
#ifndef CONCRETELANG_PARALLEL_EXECUTION_ENABLED
if (options.autoParallelize || options.loopParallelize ||
@@ -44,7 +43,7 @@ jit_compile(JITLambdaSupport_C support, const char *module,
}
MLIR_CAPI_EXPORTED mlir::concretelang::ClientParameters
jit_load_client_parameters(JITLambdaSupport_C support,
jit_load_client_parameters(JITSupport_C support,
mlir::concretelang::JitCompilationResult &result) {
GET_OR_THROW_LLVM_EXPECTED(clientParameters,
support.support.loadClientParameters(result));
@@ -52,7 +51,7 @@ jit_load_client_parameters(JITLambdaSupport_C support,
}
MLIR_CAPI_EXPORTED std::shared_ptr<mlir::concretelang::JITLambda>
jit_load_server_lambda(JITLambdaSupport_C support,
jit_load_server_lambda(JITSupport_C support,
mlir::concretelang::JitCompilationResult &result) {
GET_OR_THROW_LLVM_EXPECTED(serverLambda,
support.support.loadServerLambda(result));
@@ -60,22 +59,19 @@ jit_load_server_lambda(JITLambdaSupport_C support,
}
MLIR_CAPI_EXPORTED std::unique_ptr<concretelang::clientlib::PublicResult>
jit_server_call(JITLambdaSupport_C support,
mlir::concretelang::JITLambda &lambda,
jit_server_call(JITSupport_C support, mlir::concretelang::JITLambda &lambda,
concretelang::clientlib::PublicArguments &args) {
GET_OR_THROW_LLVM_EXPECTED(publicResult, lambda.call(args));
return std::move(*publicResult);
}
// Library Support bindings ///////////////////////////////////////////////////
MLIR_CAPI_EXPORTED LibraryLambdaSupport_C
library_lambda_support(const char *outputPath) {
return LibraryLambdaSupport_C{
mlir::concretelang::LibraryLambdaSupport(outputPath)};
MLIR_CAPI_EXPORTED LibrarySupport_C library_support(const char *outputPath) {
return LibrarySupport_C{mlir::concretelang::LibrarySupport(outputPath)};
}
std::unique_ptr<mlir::concretelang::LibraryCompilationResult>
library_compile(LibraryLambdaSupport_C support, const char *module,
library_compile(LibrarySupport_C support, const char *module,
mlir::concretelang::CompilationOptions options) {
#ifndef CONCRETELANG_PARALLEL_EXECUTION_ENABLED
if (options.autoParallelize || options.loopParallelize ||
@@ -91,7 +87,7 @@ library_compile(LibraryLambdaSupport_C support, const char *module,
MLIR_CAPI_EXPORTED mlir::concretelang::ClientParameters
library_load_client_parameters(
LibraryLambdaSupport_C support,
LibrarySupport_C support,
mlir::concretelang::LibraryCompilationResult &result) {
GET_OR_THROW_LLVM_EXPECTED(clientParameters,
support.support.loadClientParameters(result));
@@ -100,7 +96,7 @@ library_load_client_parameters(
MLIR_CAPI_EXPORTED concretelang::serverlib::ServerLambda
library_load_server_lambda(
LibraryLambdaSupport_C support,
LibrarySupport_C support,
mlir::concretelang::LibraryCompilationResult &result) {
GET_OR_THROW_LLVM_EXPECTED(serverLambda,
support.support.loadServerLambda(result));
@@ -108,7 +104,7 @@ library_load_server_lambda(
}
MLIR_CAPI_EXPORTED std::unique_ptr<concretelang::clientlib::PublicResult>
library_server_call(LibraryLambdaSupport_C support,
library_server_call(LibrarySupport_C support,
concretelang::serverlib::ServerLambda lambda,
concretelang::clientlib::PublicArguments &args) {
GET_OR_THROW_LLVM_EXPECTED(publicResult,

View File

@@ -2,7 +2,7 @@ add_mlir_library(ConcretelangSupport
Pipeline.cpp
Jit.cpp
CompilerEngine.cpp
JitLambdaSupport.cpp
JITSupport.cpp
LambdaArgument.cpp
V0Parameters.cpp
V0Curves.cpp

View File

@@ -3,19 +3,18 @@
// https://github.com/zama-ai/concrete-compiler-internal/blob/master/LICENSE.txt
// for license information.
#include <concretelang/Support/JitLambdaSupport.h>
#include <concretelang/Support/JITSupport.h>
#include <llvm/Support/TargetSelect.h>
#include <mlir/Target/LLVMIR/Dialect/LLVMIR/LLVMToLLVMIRTranslation.h>
namespace mlir {
namespace concretelang {
JitLambdaSupport::JitLambdaSupport(llvm::Optional<std::string> runtimeLibPath)
JITSupport::JITSupport(llvm::Optional<std::string> runtimeLibPath)
: runtimeLibPath(runtimeLibPath) {}
llvm::Expected<std::unique_ptr<JitCompilationResult>>
JitLambdaSupport::compile(llvm::SourceMgr &program,
CompilationOptions options) {
JITSupport::compile(llvm::SourceMgr &program, CompilationOptions options) {
// Setup the compiler engine
auto context = std::make_shared<CompilationContext>();
concretelang::CompilerEngine engine(context);

View File

@@ -33,7 +33,7 @@
#include "concretelang/Runtime/runtime_api.h"
#include "concretelang/Support/CompilerEngine.h"
#include "concretelang/Support/Error.h"
#include "concretelang/Support/JitLambdaSupport.h"
#include "concretelang/Support/JITSupport.h"
#include "concretelang/Support/LLVMEmitFile.h"
#include "concretelang/Support/Pipeline.h"
#include "concretelang/Support/logging.h"
@@ -292,9 +292,9 @@ mlir::LogicalResult processInputBuffer(
if (action == Action::JIT_INVOKE) {
auto lambdaOrErr =
mlir::concretelang::ClientServer<mlir::concretelang::JitLambdaSupport>::
mlir::concretelang::ClientServer<mlir::concretelang::JITSupport>::
create(buffer->getBuffer(), options, keySetCache,
mlir::concretelang::JitLambdaSupport());
mlir::concretelang::JITSupport());
llvm::Expected<uint64_t> resOrErr = (*lambdaOrErr)(jitArgs);

View File

@@ -3,9 +3,7 @@ import tempfile
import pytest
import numpy as np
from concrete.compiler import JITLambdaSupport, LibraryLambdaSupport
from concrete.compiler import ClientSupport
from concrete.compiler import KeySetCache
from concrete.compiler import JITSupport, LibrarySupport, ClientSupport, KeySetCache
KEY_SET_CACHE_PATH = os.path.join(tempfile.gettempdir(), "KeySetCache")
@@ -244,19 +242,19 @@ end_to_end_fixture = [
@pytest.mark.parametrize("mlir_input, args, expected_result", end_to_end_fixture)
def test_jit_compile_and_run(mlir_input, args, expected_result):
engine = JITLambdaSupport.new()
engine = JITSupport.new()
compile_and_run(engine, mlir_input, args, expected_result)
@pytest.mark.parametrize("mlir_input, args, expected_result", end_to_end_fixture)
def test_lib_compile_and_run(mlir_input, args, expected_result):
engine = LibraryLambdaSupport.new("py_test_lib_compile_and_run")
engine = LibrarySupport.new("py_test_lib_compile_and_run")
compile_and_run(engine, mlir_input, args, expected_result)
@pytest.mark.parametrize("mlir_input, args, expected_result", end_to_end_fixture)
def test_lib_compile_reload_and_run(mlir_input, args, expected_result):
engine = LibraryLambdaSupport.new("test_lib_compile_reload_and_run")
engine = LibrarySupport.new("test_lib_compile_reload_and_run")
# Here don't save compilation result, reload
engine.compile(mlir_input)
compilation_result = engine.reload()
@@ -293,7 +291,7 @@ def test_lib_compile_reload_and_run(mlir_input, args, expected_result):
],
)
def test_compile_and_run_invalid_arg_number(mlir_input, args):
engine = JITLambdaSupport.new()
engine = JITSupport.new()
with pytest.raises(
RuntimeError, match=r"function has arity 2 but is applied to too many arguments"
):
@@ -318,7 +316,7 @@ def test_compile_and_run_invalid_arg_number(mlir_input, args):
],
)
def test_compile_and_run_tlu(mlir_input, args, expected_result):
engine = JITLambdaSupport.new()
engine = JITSupport.new()
compile_and_run(engine, mlir_input, args, expected_result)
@@ -339,7 +337,7 @@ def test_compile_and_run_tlu(mlir_input, args, expected_result):
],
)
def test_compile_invalid(mlir_input):
engine = JITLambdaSupport.new()
engine = JITSupport.new()
with pytest.raises(
RuntimeError, match=r"cannot find the function for generate client parameters"
):

View File

@@ -4,10 +4,7 @@ import tempfile
import pytest
import numpy as np
from concrete.compiler.client_support import ClientSupport
from concrete.compiler.compilation_options import CompilationOptions
from concrete.compiler.jit_lambda_support import JITLambdaSupport
from concrete.compiler.key_set_cache import KeySetCache
from concrete.compiler import ClientSupport, CompilationOptions, JITSupport, KeySetCache
KEY_SET_CACHE_PATH = os.path.join(tempfile.gettempdir(), "KeySetCache")
@@ -69,5 +66,5 @@ def compile_and_run(engine, mlir_input, args, expected_result):
],
)
def test_compile_and_run_parallel(mlir_input, args, expected_result):
engine = JITLambdaSupport.new()
engine = JITSupport.new()
compile_and_run(engine, mlir_input, args, expected_result)

View File

@@ -4,8 +4,8 @@
#include <type_traits>
#include "EndToEndFixture.h"
#include "concretelang/Support/JitLambdaSupport.h"
#include "concretelang/Support/LibraryLambdaSupport.h"
#include "concretelang/Support/JITSupport.h"
#include "concretelang/Support/LibrarySupport.h"
template <typename LambdaSupport>
void compile_and_run(EndToEndDesc desc, LambdaSupport support) {
@@ -91,10 +91,9 @@ void compile_and_run(EndToEndDesc desc, LambdaSupport support) {
/// Instantiate the test suite for Jit
INSTANTIATE_END_TO_END_TEST_SUITE_FROM_ALL_TEST_FILES(
JitTest, mlir::concretelang::JitLambdaSupport())
JitTest, mlir::concretelang::JITSupport())
/// Instantiate the test suite for Jit
INSTANTIATE_END_TO_END_TEST_SUITE_FROM_ALL_TEST_FILES(
LibraryTest,
mlir::concretelang::LibraryLambdaSupport("/tmp/end_to_end_test_" +
desc.description))
LibraryTest, mlir::concretelang::LibrarySupport("/tmp/end_to_end_test_" +
desc.description))

View File

@@ -5,7 +5,7 @@
#include "concretelang/ClientLib/KeySetCache.h"
#include "concretelang/Support/CompilerEngine.h"
#include "concretelang/Support/JitLambdaSupport.h"
#include "concretelang/Support/JITSupport.h"
#include "llvm/Support/Path.h"
#include "globals.h"
@@ -128,7 +128,7 @@ getTestKeySetCachePtr() {
// returns the corresponding lambda. Any compilation errors are caught
// and result in abnormal termination.
inline llvm::Expected<
mlir::concretelang::ClientServer<mlir::concretelang::JitLambdaSupport>>
mlir::concretelang::ClientServer<mlir::concretelang::JITSupport>>
internalCheckedJit(llvm::StringRef src, llvm::StringRef func = "main",
bool useDefaultFHEConstraints = false,
bool dataflowParallelize = false,
@@ -151,9 +151,8 @@ internalCheckedJit(llvm::StringRef src, llvm::StringRef func = "main",
#endif
auto lambdaOrErr =
mlir::concretelang::ClientServer<mlir::concretelang::JitLambdaSupport>::
create(src, options, getTestKeySetCache(),
mlir::concretelang::JitLambdaSupport());
mlir::concretelang::ClientServer<mlir::concretelang::JITSupport>::create(
src, options, getTestKeySetCache(), mlir::concretelang::JITSupport());
return lambdaOrErr;
}