I'm trying to use the PEXT x86 intrinsics from LLVM (see patch below), but I get the following error when I call it:
julia> using Core.Intrinsics
julia> pext(x::Uint32, y::Uint32) = box(Uint32, pext32(unbox(Uint32, x), unbox(Uint32, y)))
pext (generic function with 1 method)
julia> pext(0x8daf8af4, 0b00000111_00111111_00111111_00111111)
LLVM ERROR: Program used external function 'llvm.x86.bmi.pext.32.i32' which could not be resolved!
At first this looks like a linking issue, but notice the unresolved name ends in an extra ".i32" type suffix: getDeclaration appends that mangling only when it is handed a type list, which is correct only for *overloaded* intrinsics. Intrinsic::x86_bmi_pext_32 is not overloaded — its fixed name is 'llvm.x86.bmi.pext.32' — so passing a type list declares a nonexistent intrinsic that can never be resolved.
By way of motivation, this instruction may allow freakishly fast UTF-8 decoding if it does what I think it does and I can get it to work.
----------------
diff --git a/src/intrinsics.cpp b/src/intrinsics.cpp
index e4efcdd..7c16c5f 100644
--- a/src/intrinsics.cpp
+++ b/src/intrinsics.cpp
@@ -38,6 +38,7 @@ namespace JL_I {
sqrt_llvm, powi_llvm,
// byte vectors
bytevec_ref, bytevec_ref32, bytevec_utf8_ref,
+ pext32,
// pointer access
pointerref, pointerset, pointertoref,
// c interface
@@ -1009,6 +1010,14 @@ static Value *emit_intrinsic(intrinsic f, jl_value_t **args, size_t nargs,
Value *den;
Value *typemin;
switch (f) {
+ HANDLE(pext32,2) {
+ // x86_bmi_pext_32 is not an overloaded intrinsic, so it must be
+ // declared WITHOUT a type list; passing ArrayRef<Type*>(T_uint32)
+ // asks for the mangled name "llvm.x86.bmi.pext.32.i32", which
+ // does not exist and fails to resolve at JIT time.
+ return builder.CreateCall2(
+ Intrinsic::getDeclaration(jl_Module, Intrinsic::x86_bmi_pext_32),
+ JL_INT(x), JL_INT(y));
+ }
HANDLE(bytevec_ref,2) {
Value *b = JL_INT(x);
Value *i = builder.CreateSub(JL_INT(y), ConstantInt::get(T_size, 1));
@@ -1793,6 +1802,7 @@ extern "C" void jl_init_intrinsic_functions(void)
ADD_I(flipsign_int); ADD_I(select_value); ADD_I(sqrt_llvm);
ADD_I(powi_llvm);
ADD_I(bytevec_ref); ADD_I(bytevec_ref32); ADD_I(bytevec_utf8_ref);
+ ADD_I(pext32);
ADD_I(pointerref); ADD_I(pointerset); ADD_I(pointertoref);
ADD_I(checked_sadd); ADD_I(checked_uadd);
ADD_I(checked_ssub); ADD_I(checked_usub);