// NOTE(review): this chunk is the interior of the X86 target-lowering
// constructor (header/footer outside this view). It arrived with leftover
// unified-diff markers ("- " / "+ ") from the LLVM refactor that folded the
// two-call pattern
//     setOperationAction(Op, VT, Promote); AddPromotedToType(Op, VT, DestVT);
// into the single equivalent call
//     setOperationPromotedToType(Op, VT, DestVT);
// The diff has been resolved here by keeping the "+" lines and dropping the
// "-" lines; all unmarked context lines are preserved byte-for-byte.
// Closing braces that appear unbalanced close scopes opened before this
// excerpt — do not "fix" them without seeing the full file.

// Promote the i8 variants and force them on up to i32 which has a shorter
// encoding.
setOperationPromotedToType(ISD::CTTZ           , MVT::i8 , MVT::i32);
setOperationPromotedToType(ISD::CTTZ_ZERO_UNDEF, MVT::i8 , MVT::i32);
if (Subtarget.hasBMI()) {
setOperationAction(ISD::CTTZ_ZERO_UNDEF, MVT::i16 , Expand);
setOperationAction(ISD::CTTZ_ZERO_UNDEF, MVT::i32 , Expand);
if (Subtarget.hasLZCNT()) {
// When promoting the i8 variants, force them to i32 for a shorter
// encoding.
setOperationPromotedToType(ISD::CTLZ           , MVT::i8 , MVT::i32);
setOperationPromotedToType(ISD::CTLZ_ZERO_UNDEF, MVT::i8 , MVT::i32);
setOperationAction(ISD::CTLZ_ZERO_UNDEF, MVT::i16 , Expand);
setOperationAction(ISD::CTLZ_ZERO_UNDEF, MVT::i32 , Expand);
if (Subtarget.is64Bit())
// Promote v16i8, v8i16, v4i32 load, select, and, or, xor to v2i64.
for (auto VT : { MVT::v16i8, MVT::v8i16, MVT::v4i32 }) {
setOperationPromotedToType(ISD::AND,    VT, MVT::v2i64);
setOperationPromotedToType(ISD::OR,     VT, MVT::v2i64);
setOperationPromotedToType(ISD::XOR,    VT, MVT::v2i64);
setOperationPromotedToType(ISD::LOAD,   VT, MVT::v2i64);
setOperationPromotedToType(ISD::SELECT, VT, MVT::v2i64);
}
// Custom lower v2i64 and v2f64 selects.
// Promote v32i8, v16i16, v8i32 select, and, or, xor to v4i64.
for (auto VT : { MVT::v32i8, MVT::v16i16, MVT::v8i32 }) {
setOperationPromotedToType(ISD::AND,    VT, MVT::v4i64);
setOperationPromotedToType(ISD::OR,     VT, MVT::v4i64);
setOperationPromotedToType(ISD::XOR,    VT, MVT::v4i64);
setOperationPromotedToType(ISD::LOAD,   VT, MVT::v4i64);
setOperationPromotedToType(ISD::SELECT, VT, MVT::v4i64);
}
}
}
}
for (auto VT : { MVT::v64i8, MVT::v32i16, MVT::v16i32 }) {
setOperationPromotedToType(ISD::SELECT, VT, MVT::v8i64);
}
}// has AVX-512
setOperationAction(ISD::MLOAD, VT, Legal);
setOperationAction(ISD::MSTORE, VT, Legal);
setOperationPromotedToType(ISD::AND, VT, MVT::v8i64);
setOperationPromotedToType(ISD::OR,  VT, MVT::v8i64);
setOperationPromotedToType(ISD::XOR, VT, MVT::v8i64);
}
}