From 758c2f65805564e0c51cccaacf8307e52a9e312b Mon Sep 17 00:00:00 2001
From: Serban Constantinescu
Date: Sun, 22 Feb 2015 22:08:01 +0000
Subject: [PATCH] Opt Compiler: Materialise constants that cannot be encoded

The VIXL MacroAssembler deals gracefully with any immediate. However,
when the constant has multiple uses and cannot be encoded in the
instruction's immediate field, we are better off using a register for
the constant and thus sharing the constant generation between the
multiple uses.

Eg:
  var += #Const;        // #Const cannot be encoded.
  var += #Const;

  Before:                     After:
    mov wip0, #Const            mov w4, #Const
    add w0, w0, wip0            add w0, w0, w4
    mov wip0, #Const            add w0, w0, w4
    add w0, w0, wip0

Change-Id: I8d1f620872d1241cf582fb4f3b45b5091b790146
Signed-off-by: Serban Constantinescu
---
 compiler/optimizing/code_generator_arm64.cc |  9 +++++----
 compiler/optimizing/common_arm64.h          | 35 +++++++++++++++++++++++++++++
 2 files changed, 40 insertions(+), 4 deletions(-)

diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index c21084a6f..6b4c2f065 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -63,6 +63,7 @@ using helpers::StackOperandFrom;
 using helpers::VIXLRegCodeFromART;
 using helpers::WRegisterFrom;
 using helpers::XRegisterFrom;
+using helpers::ARM64EncodableConstantOrRegister;
 
 static constexpr size_t kHeapRefSize = sizeof(mirror::HeapReference<mirror::Object>);
 static constexpr int kCurrentMethodStackOffset = 0;
@@ -1104,7 +1105,7 @@ void LocationsBuilderARM64::HandleBinaryOp(HBinaryOperation* instr) {
     case Primitive::kPrimInt:
     case Primitive::kPrimLong:
       locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RegisterOrConstant(instr->InputAt(1)));
+      locations->SetInAt(1, ARM64EncodableConstantOrRegister(instr->InputAt(1), instr));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
 
@@ -1395,7 +1396,7 @@ void LocationsBuilderARM64::VisitCompare(HCompare* compare) {
   switch (in_type) {
     case Primitive::kPrimLong: {
       locations->SetInAt(0, Location::RequiresRegister());
-      locations->SetInAt(1, Location::RegisterOrConstant(compare->InputAt(1)));
+      locations->SetInAt(1, ARM64EncodableConstantOrRegister(compare->InputAt(1), compare));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
     }
@@ -1465,7 +1466,7 @@ void InstructionCodeGeneratorARM64::VisitCompare(HCompare* compare) {
 void LocationsBuilderARM64::VisitCondition(HCondition* instruction) {
   LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction);
   locations->SetInAt(0, Location::RequiresRegister());
-  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
+  locations->SetInAt(1, ARM64EncodableConstantOrRegister(instruction->InputAt(1), instruction));
   if (instruction->NeedsMaterialization()) {
     locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
   }
@@ -2116,7 +2117,7 @@ void LocationsBuilderARM64::VisitNeg(HNeg* neg) {
   switch (neg->GetResultType()) {
     case Primitive::kPrimInt:
     case Primitive::kPrimLong:
-      locations->SetInAt(0, Location::RegisterOrConstant(neg->InputAt(0)));
+      locations->SetInAt(0, ARM64EncodableConstantOrRegister(neg->InputAt(0), neg));
       locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
       break;
 
diff --git a/compiler/optimizing/common_arm64.h b/compiler/optimizing/common_arm64.h
index 9447d3b81..056deb98d 100644
--- a/compiler/optimizing/common_arm64.h
+++ b/compiler/optimizing/common_arm64.h
@@ -183,6 +183,41 @@ static inline vixl::Operand OperandFromMemOperand(const vixl::MemOperand& mem_op
   }
 }
 
+static bool CanEncodeConstantAsImmediate(HConstant* constant, HInstruction* instr) {
+  DCHECK(constant->IsIntConstant() || constant->IsLongConstant());
+
+  // For single uses we let VIXL handle the constant generation since it will
+  // use registers that are not managed by the register allocator (wip0, wip1).
+  if (constant->GetUses().HasOnlyOneUse()) {
+    return true;
+  }
+
+  int64_t value = constant->IsIntConstant() ? constant->AsIntConstant()->GetValue()
+                                            : constant->AsLongConstant()->GetValue();
+
+  if (instr->IsAdd() || instr->IsSub() || instr->IsCondition() || instr->IsCompare()) {
+    // Uses aliases of ADD/SUB instructions.
+    return vixl::Assembler::IsImmAddSub(value);
+  } else if (instr->IsAnd() || instr->IsOr() || instr->IsXor()) {
+    // Uses logical operations.
+    return vixl::Assembler::IsImmLogical(value, vixl::kXRegSize);
+  } else {
+    DCHECK(instr->IsNeg());
+    // Uses mov -immediate.
+    return vixl::Assembler::IsImmMovn(value, vixl::kXRegSize);
+  }
+}
+
+static inline Location ARM64EncodableConstantOrRegister(HInstruction* constant,
+                                                        HInstruction* instr) {
+  if (constant->IsConstant()
+      && CanEncodeConstantAsImmediate(constant->AsConstant(), instr)) {
+    return Location::ConstantLocation(constant->AsConstant());
+  }
+
+  return Location::RequiresRegister();
+}
+
 }  // namespace helpers
 }  // namespace arm64
 }  // namespace art
-- 
2.11.0
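
Note: as background for the IsImmAddSub() check used above, an AArch64 ADD/SUB
immediate must be an unsigned 12-bit value, optionally shifted left by 12 bits.
The code below is a minimal standalone sketch of that rule, not the VIXL
implementation; the SketchIsImmAddSub name and the example values are purely
illustrative. Constants that fail this test and have more than one use are the
ones the patch now materialises into an allocatable register.

  #include <cstdint>

  // Sketch of the ADD/SUB-immediate rule that vixl::Assembler::IsImmAddSub()
  // tests: the value fits in 12 bits, or is a 12-bit value shifted left by 12.
  static bool SketchIsImmAddSub(int64_t value) {
    uint64_t v = static_cast<uint64_t>(value);
    bool fits_imm12 = (v & ~UINT64_C(0xfff)) == 0;                  // #imm12
    bool fits_imm12_lsl12 = (v & ~(UINT64_C(0xfff) << 12)) == 0;    // #imm12, LSL #12
    return fits_imm12 || fits_imm12_lsl12;
  }

  int main() {
    // 0x123 and 0x123000 are encodable directly; 0x123456 is not, so when it
    // has multiple uses it is now loaded into a register once and reused.
    return (SketchIsImmAddSub(0x123) && SketchIsImmAddSub(0x123000) &&
            !SketchIsImmAddSub(0x123456)) ? 0 : 1;
  }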