[MLIR][ExecutionEngine] Enable PIC option (#170995)

This PR lets the MLIR execution engine dump its object file as position-independent
code (PIC), which is needed when the object file is later bundled into a dynamic
shared library.
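
As a rough sketch of the intended workflow (not part of this patch), the C API call
below requests both object dumping and PIC relocation. The helper name, the output
file name, and the surrounding setup are placeholders, and `module` is assumed to
already contain only LLVM-dialect IR:

#include "mlir-c/ExecutionEngine.h"
#include "mlir-c/IR.h"
#include "mlir-c/RegisterEverything.h"
#include "mlir-c/Support.h"

// Dump position-independent machine code for `module` to "jitted.o".
static void dumpPicObject(MlirContext ctx, MlirModule module) {
  mlirRegisterAllLLVMTranslations(ctx);
  MlirExecutionEngine jit = mlirExecutionEngineCreate(
      module, /*optLevel=*/2, /*numPaths=*/0, /*sharedLibPaths=*/NULL,
      /*enableObjectDump=*/true, /*enablePIC=*/true);
  if (mlirExecutionEngineIsNull(jit))
    return;
  mlirExecutionEngineDumpToObjectFile(
      jit, mlirStringRefCreateFromCString("jitted.o"));
  mlirExecutionEngineDestroy(jit);
}

The dumped object can then be linked into a shared library with the platform
toolchain, for example `cc -shared jitted.o -o libjitted.so`.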

---------

Co-authored-by: Mehdi Amini <joker.eph@gmail.com>
Author: Tianqi Chen
Date: 2025-12-07 12:07:17 -05:00
Committed by: GitHub
Parent: 3fc7419236
Commit: 11fd760e3a
6 changed files with 18 additions and 11 deletions

@@ -41,10 +41,13 @@ DEFINE_C_API_STRUCT(MlirExecutionEngine, void);
 /// generation. The number and array of paths corresponding to shared libraries
 /// that will be loaded are specified via `numPaths` and `sharedLibPaths`
 /// respectively.
+/// The `enablePIC` argument controls the relocation model; when true, the
+/// generated code is emitted as "position independent", making it possible to
+/// save it and reload it as a shared object in another process.
 /// TODO: figure out other options.
 MLIR_CAPI_EXPORTED MlirExecutionEngine mlirExecutionEngineCreate(
     MlirModule op, int optLevel, int numPaths,
-    const MlirStringRef *sharedLibPaths, bool enableObjectDump);
+    const MlirStringRef *sharedLibPaths, bool enableObjectDump, bool enablePIC);
 /// Initialize the ExecutionEngine. Global constructors specified by
 /// `llvm.mlir.global_ctors` will be run. One common scenario is that kernel

@@ -75,13 +75,13 @@ NB_MODULE(_mlirExecutionEngine, m) {
           "__init__",
           [](PyExecutionEngine &self, MlirModule module, int optLevel,
              const std::vector<std::string> &sharedLibPaths,
-             bool enableObjectDump) {
+             bool enableObjectDump, bool enablePIC) {
             llvm::SmallVector<MlirStringRef, 4> libPaths;
             for (const std::string &path : sharedLibPaths)
               libPaths.push_back({path.c_str(), path.length()});
-            MlirExecutionEngine executionEngine =
-                mlirExecutionEngineCreate(module, optLevel, libPaths.size(),
-                                          libPaths.data(), enableObjectDump);
+            MlirExecutionEngine executionEngine = mlirExecutionEngineCreate(
+                module, optLevel, libPaths.size(), libPaths.data(),
+                enableObjectDump, enablePIC);
             if (mlirExecutionEngineIsNull(executionEngine))
               throw std::runtime_error(
                   "Failure while creating the ExecutionEngine.");
@@ -89,7 +89,7 @@ NB_MODULE(_mlirExecutionEngine, m) {
           },
           nb::arg("module"), nb::arg("opt_level") = 2,
           nb::arg("shared_libs") = nb::list(),
-          nb::arg("enable_object_dump") = true,
+          nb::arg("enable_object_dump") = true, nb::arg("enable_pic") = false,
           "Create a new ExecutionEngine instance for the given Module. The "
          "module must contain only dialects that can be translated to LLVM. "
          "Perform transformations and code generation at the optimization "

@@ -22,7 +22,7 @@ using namespace mlir;
 extern "C" MlirExecutionEngine
 mlirExecutionEngineCreate(MlirModule op, int optLevel, int numPaths,
                           const MlirStringRef *sharedLibPaths,
-                          bool enableObjectDump) {
+                          bool enableObjectDump, bool enablePIC) {
   static bool initOnce = [] {
     llvm::InitializeNativeTarget();
     llvm::InitializeNativeTargetAsmParser(); // needed for inline_asm
@@ -43,6 +43,8 @@ mlirExecutionEngineCreate(MlirModule op, int optLevel, int numPaths,
     consumeError(tmBuilderOrError.takeError());
     return MlirExecutionEngine{nullptr};
   }
+  if (enablePIC)
+    tmBuilderOrError->setRelocationModel(llvm::Reloc::PIC_);
   auto tmOrError = tmBuilderOrError->createTargetMachine();
   if (!tmOrError) {
     llvm::errs() << "Failed to create a TargetMachine for the host because: \n";
@@ -63,7 +65,8 @@ mlirExecutionEngineCreate(MlirModule op, int optLevel, int numPaths,
   jitOptions.jitCodeGenOptLevel = static_cast<llvm::CodeGenOptLevel>(optLevel);
   jitOptions.sharedLibPaths = libPaths;
   jitOptions.enableObjectDump = enableObjectDump;
-  auto jitOrError = ExecutionEngine::create(unwrap(op), jitOptions);
+  auto jitOrError = ExecutionEngine::create(unwrap(op), jitOptions,
+                                            std::move(tmOrError.get()));
   if (!jitOrError) {
     llvm::errs() << "Failed to create an ExecutionEngine because: \n";
     consumeError(jitOrError.takeError());
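
For users of the C++ ExecutionEngine API, the same behavior can be obtained
directly by mirroring what the wrapper above does. A minimal sketch, assuming the
native target has already been initialized and `module` is already lowered to the
LLVM dialect (the function name is illustrative):

#include "mlir/ExecutionEngine/ExecutionEngine.h"
#include "llvm/ExecutionEngine/Orc/JITTargetMachineBuilder.h"
#include "llvm/Support/CodeGen.h"
#include <memory>
#include <utility>

llvm::Expected<std::unique_ptr<mlir::ExecutionEngine>>
createPicEngine(mlir::ModuleOp module) {
  auto tmBuilderOrError = llvm::orc::JITTargetMachineBuilder::detectHost();
  if (!tmBuilderOrError)
    return tmBuilderOrError.takeError();
  // The same knob the C API wrapper flips when `enablePIC` is true.
  tmBuilderOrError->setRelocationModel(llvm::Reloc::PIC_);
  auto tmOrError = tmBuilderOrError->createTargetMachine();
  if (!tmOrError)
    return tmOrError.takeError();

  mlir::ExecutionEngineOptions options;
  options.enableObjectDump = true; // keep a copy of the compiled object
  return mlir::ExecutionEngine::create(module.getOperation(), options,
                                       std::move(*tmOrError));
}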

@@ -69,7 +69,7 @@ void testSimpleExecution(void) {
   mlirRegisterAllLLVMTranslations(ctx);
   MlirExecutionEngine jit = mlirExecutionEngineCreate(
       module, /*optLevel=*/2, /*numPaths=*/0, /*sharedLibPaths=*/NULL,
-      /*enableObjectDump=*/false);
+      /*enableObjectDump=*/false, /*enablePIC=*/false);
   if (mlirExecutionEngineIsNull(jit)) {
     fprintf(stderr, "Execution engine creation failed");
     exit(2);
@@ -125,7 +125,7 @@ void testOmpCreation(void) {
   // against the OpenMP library.
   MlirExecutionEngine jit = mlirExecutionEngineCreate(
       module, /*optLevel=*/2, /*numPaths=*/0, /*sharedLibPaths=*/NULL,
-      /*enableObjectDump=*/false);
+      /*enableObjectDump=*/false, /*enablePIC=*/false);
   if (mlirExecutionEngineIsNull(jit)) {
     fprintf(stderr, "Engine creation failed with OpenMP");
     exit(2);

@@ -79,7 +79,7 @@ void testGlobalCtorJitCallback(void) {
   // Create execution engine with initialization disabled
   MlirExecutionEngine jit = mlirExecutionEngineCreate(
       module, /*optLevel=*/2, /*numPaths=*/0, /*sharedLibPaths=*/NULL,
-      /*enableObjectDump=*/false);
+      /*enableObjectDump=*/false, /*enablePIC=*/false);
   if (mlirExecutionEngineIsNull(jit)) {
     fprintf(stderr, "Execution engine creation failed");

@@ -807,6 +807,7 @@ def testDumpToObjectFile():
       # because RTDyldObjectLinkingLayer::emit will try to resolve symbols before dumping
       # (see the jitLinkForORC call at the bottom there).
       shared_libs=[MLIR_C_RUNNER_UTILS],
+      enable_pic=True,
   )
   # CHECK: Object file exists: True
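
The second half of the workflow described in the new header comment, reloading the
saved code as a shared object in another process, is then ordinary dynamic loading.
A hedged sketch for POSIX systems; the library path and the exported symbol name
are placeholders:

#include <dlfcn.h>
#include <cstdio>

int main() {
  // Built beforehand, e.g. with: cc -shared jitted.o -o libjitted.so
  void *handle = dlopen("./libjitted.so", RTLD_NOW);
  if (!handle) {
    std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
    return 1;
  }
  // Look up a function that the original MLIR module exported (placeholder name).
  auto fn = reinterpret_cast<void (*)()>(dlsym(handle, "some_exported_func"));
  if (fn)
    fn();
  dlclose(handle);
  return 0;
}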