1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefixes=CHECK,BMI1
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2 | FileCheck %s --check-prefixes=CHECK,BMI2
5 declare i64 @llvm.x86.bmi.bextr.64(i64, i64)
; Direct call of the 64-bit BEXTR intrinsic with a variable control operand:
; both operands are already 64-bit GPRs, so codegen is expected to emit a
; single register-form bextrq (control in %rsi, source in %rdi).
; NOTE(review): the ret/closing brace of this test body is outside this view.
7 define i64 @bextr64(i64 %x, i64 %y) {
8 ; CHECK-LABEL: bextr64:
10 ; CHECK-NEXT: bextrq %rsi, %rdi, %rax
12 %tmp = tail call i64 @llvm.x86.bmi.bextr.64(i64 %x, i64 %y)
; BEXTR with a constant control word 0xC04 (per the BEXTR control encoding:
; start bit 4, length 12 -- see Intel SDM). Because the extracted field lies
; entirely within the low 32 bits, the expected lowering is the narrower
; 32-bit bextrl form rather than bextrq.
; NOTE(review): remainder of this test body is outside this view.
16 define i64 @bextr64b(i64 %x) uwtable ssp {
17 ; CHECK-LABEL: bextr64b:
19 ; CHECK-NEXT: movl $3076, %eax # imm = 0xC04
20 ; CHECK-NEXT: bextrl %eax, %edi, %eax
27 ; Make sure we still use the AH subreg trick to extract 15:8
; Extracting bits 15:8 is cheaper via the high-byte subregister (movzbl %ah)
; than via bextr, so no bextr instruction should appear here.
; NOTE(review): remainder of this test body is outside this view.
28 define i64 @bextr64_subreg(i64 %x) uwtable ssp {
29 ; CHECK-LABEL: bextr64_subreg:
31 ; CHECK-NEXT: movq %rdi, %rax
32 ; CHECK-NEXT: movzbl %ah, %eax
; Same constant control word (0xC04) as @bextr64b, but the source value comes
; from memory: the load should be folded into the bextrl memory operand
; ((%rdi)) instead of being emitted as a separate mov.
; NOTE(review): remainder of this test body is outside this view.
39 define i64 @bextr64b_load(i64* %x) {
40 ; CHECK-LABEL: bextr64b_load:
42 ; CHECK-NEXT: movl $3076, %eax # imm = 0xC04
43 ; CHECK-NEXT: bextrl %eax, (%rdi), %eax
45 %1 = load i64, i64* %x, align 8
; The control operand arrives as i32 and is sign-extended to i64 before the
; intrinsic call; the extension should be free (just a register "kill"
; annotation on %esi/%rsi), leaving a single bextrq.
; NOTE(review): remainder of this test body is outside this view.
52 define i64 @bextr64c(i64 %x, i32 %y) {
53 ; CHECK-LABEL: bextr64c:
55 ; CHECK-NEXT: # kill: def $esi killed $esi def $rsi
56 ; CHECK-NEXT: bextrq %rsi, %rdi, %rax
58 %tmp0 = sext i32 %y to i64
59 %tmp1 = tail call i64 @llvm.x86.bmi.bextr.64(i64 %x, i64 %tmp0)
; Pattern-match test: a shift followed by an and with 8589934591 (= 2^33 - 1,
; a bottom-anchored contiguous mask) should be recognized and combined into a
; bextrq with control word 0x2102 (start 2, length 33 per the BEXTR encoding).
; NOTE(review): the %shr definition and the rest of this body are outside this
; view.
63 define i64 @bextr64d(i64 %a) {
64 ; CHECK-LABEL: bextr64d:
65 ; CHECK: # %bb.0: # %entry
66 ; CHECK-NEXT: movl $8450, %eax # imm = 0x2102
67 ; CHECK-NEXT: bextrq %rax, %rdi, %rax
71 %and = and i64 %shr, 8589934591
; Negative test: the mask 8589934590 (0x1FFFFFFFE) has bit 0 clear, so it is
; NOT a contiguous mask starting at bit 0 and must not be turned into a bextr.
; Expected lowering stays as plain shift + movabs-immediate + and.
; NOTE(review): the %shr definition and the rest of this body are outside this
; view.
75 define i64 @non_bextr64(i64 %x) {
76 ; CHECK-LABEL: non_bextr64:
77 ; CHECK: # %bb.0: # %entry
78 ; CHECK-NEXT: shrq $2, %rdi
79 ; CHECK-NEXT: movabsq $8589934590, %rax # imm = 0x1FFFFFFFE
80 ; CHECK-NEXT: andq %rdi, %rax
84 %and = and i64 %shr, 8589934590