/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #ifndef ART_COMPILER_OPTIMIZING_NODES_H_
18 #define ART_COMPILER_OPTIMIZING_NODES_H_
22 #include <type_traits>
24 #include "base/arena_bit_vector.h"
25 #include "base/arena_containers.h"
26 #include "base/arena_object.h"
27 #include "base/stl_util.h"
28 #include "dex/compiler_enums.h"
29 #include "entrypoints/quick/quick_entrypoints_enum.h"
31 #include "handle_scope.h"
32 #include "invoke_type.h"
33 #include "locations.h"
34 #include "method_reference.h"
35 #include "mirror/class.h"
37 #include "primitive.h"
38 #include "utils/array_ref.h"
39 #include "utils/intrusive_forward_list.h"
// Forward declarations.
class HDoubleConstant;
class LocationSummary;

// Initial reservation sizes for the arena-allocated containers used by the
// graph. These are allocation hints only, not hard limits.
static const int kDefaultNumberOfBlocks = 8;
static const int kDefaultNumberOfSuccessors = 2;
static const int kDefaultNumberOfPredecessors = 2;
static const int kDefaultNumberOfExceptionalPredecessors = 0;
static const int kDefaultNumberOfDominatedBlocks = 1;
static const int kDefaultNumberOfBackEdges = 1;

// The maximum (meaningful) distance (31) that can be used in an integer shift/rotate operation.
static constexpr int32_t kMaxIntShiftDistance = 0x1f;
// The maximum (meaningful) distance (63) that can be used in a long shift/rotate operation.
static constexpr int32_t kMaxLongShiftDistance = 0x3f;

// Sentinel values: the all-ones bit pattern of the respective width marks
// "unknown"/"invalid".
static constexpr uint32_t kUnknownFieldIndex = static_cast<uint32_t>(-1);
static constexpr uint16_t kUnknownClassDefIndex = static_cast<uint16_t>(-1);

static constexpr InvokeType kInvalidInvokeType = static_cast<InvokeType>(-1);

// Sentinel dex pc meaning "no dex pc associated".
static constexpr uint32_t kNoDexPc = -1;
91 // Signed integers and floating-point numbers.
// Result codes for the analyses run while building an HGraph
// (see e.g. HGraph::BuildDominatorTree / HGraph::AnalyzeLoops).
// NOTE(review): the enum was truncated in this copy; the first and last
// enumerators and the closing brace are restored here — confirm against
// the upstream file.
enum GraphAnalysisResult {
  kAnalysisSkipped,
  kAnalysisInvalidBytecode,
  kAnalysisFailThrowCatchLoop,
  kAnalysisFailAmbiguousArrayOp,
  kAnalysisSuccess,
};
111 class HInstructionList : public ValueObject {
113 HInstructionList() : first_instruction_(nullptr), last_instruction_(nullptr) {}
115 void AddInstruction(HInstruction* instruction);
116 void RemoveInstruction(HInstruction* instruction);
118 // Insert `instruction` before/after an existing instruction `cursor`.
119 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
120 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
122 // Return true if this list contains `instruction`.
123 bool Contains(HInstruction* instruction) const;
125 // Return true if `instruction1` is found before `instruction2` in
126 // this instruction list and false otherwise. Abort if none
127 // of these instructions is found.
128 bool FoundBefore(const HInstruction* instruction1,
129 const HInstruction* instruction2) const;
131 bool IsEmpty() const { return first_instruction_ == nullptr; }
132 void Clear() { first_instruction_ = last_instruction_ = nullptr; }
134 // Update the block of all instructions to be `block`.
135 void SetBlockOfInstructions(HBasicBlock* block) const;
137 void AddAfter(HInstruction* cursor, const HInstructionList& instruction_list);
138 void AddBefore(HInstruction* cursor, const HInstructionList& instruction_list);
139 void Add(const HInstructionList& instruction_list);
141 // Return the number of instructions in the list. This is an expensive operation.
142 size_t CountSize() const;
145 HInstruction* first_instruction_;
146 HInstruction* last_instruction_;
148 friend class HBasicBlock;
150 friend class HInstruction;
151 friend class HInstructionIterator;
152 friend class HBackwardInstructionIterator;
154 DISALLOW_COPY_AND_ASSIGN(HInstructionList);
157 class ReferenceTypeInfo : ValueObject {
159 typedef Handle<mirror::Class> TypeHandle;
161 static ReferenceTypeInfo Create(TypeHandle type_handle, bool is_exact);
163 static ReferenceTypeInfo CreateUnchecked(TypeHandle type_handle, bool is_exact) {
164 return ReferenceTypeInfo(type_handle, is_exact);
167 static ReferenceTypeInfo CreateInvalid() { return ReferenceTypeInfo(); }
169 static bool IsValidHandle(TypeHandle handle) {
170 return handle.GetReference() != nullptr;
173 bool IsValid() const {
174 return IsValidHandle(type_handle_);
177 bool IsExact() const { return is_exact_; }
179 bool IsObjectClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
181 return GetTypeHandle()->IsObjectClass();
184 bool IsStringClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
186 return GetTypeHandle()->IsStringClass();
189 bool IsObjectArray() const SHARED_REQUIRES(Locks::mutator_lock_) {
191 return IsArrayClass() && GetTypeHandle()->GetComponentType()->IsObjectClass();
194 bool IsInterface() const SHARED_REQUIRES(Locks::mutator_lock_) {
196 return GetTypeHandle()->IsInterface();
199 bool IsArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
201 return GetTypeHandle()->IsArrayClass();
204 bool IsPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
206 return GetTypeHandle()->IsPrimitiveArray();
209 bool IsNonPrimitiveArrayClass() const SHARED_REQUIRES(Locks::mutator_lock_) {
211 return GetTypeHandle()->IsArrayClass() && !GetTypeHandle()->IsPrimitiveArray();
214 bool CanArrayHold(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
216 if (!IsExact()) return false;
217 if (!IsArrayClass()) return false;
218 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(rti.GetTypeHandle().Get());
221 bool CanArrayHoldValuesOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
223 if (!IsExact()) return false;
224 if (!IsArrayClass()) return false;
225 if (!rti.IsArrayClass()) return false;
226 return GetTypeHandle()->GetComponentType()->IsAssignableFrom(
227 rti.GetTypeHandle()->GetComponentType());
230 Handle<mirror::Class> GetTypeHandle() const { return type_handle_; }
232 bool IsSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
234 DCHECK(rti.IsValid());
235 return GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
238 bool IsStrictSupertypeOf(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
240 DCHECK(rti.IsValid());
241 return GetTypeHandle().Get() != rti.GetTypeHandle().Get() &&
242 GetTypeHandle()->IsAssignableFrom(rti.GetTypeHandle().Get());
245 // Returns true if the type information provide the same amount of details.
246 // Note that it does not mean that the instructions have the same actual type
247 // (because the type can be the result of a merge).
248 bool IsEqual(ReferenceTypeInfo rti) const SHARED_REQUIRES(Locks::mutator_lock_) {
249 if (!IsValid() && !rti.IsValid()) {
250 // Invalid types are equal.
253 if (!IsValid() || !rti.IsValid()) {
254 // One is valid, the other not.
257 return IsExact() == rti.IsExact()
258 && GetTypeHandle().Get() == rti.GetTypeHandle().Get();
262 ReferenceTypeInfo() : type_handle_(TypeHandle()), is_exact_(false) {}
263 ReferenceTypeInfo(TypeHandle type_handle, bool is_exact)
264 : type_handle_(type_handle), is_exact_(is_exact) { }
266 // The class of the object.
267 TypeHandle type_handle_;
268 // Whether or not the type is exact or a superclass of the actual type.
269 // Whether or not we have any information about this type.
273 std::ostream& operator<<(std::ostream& os, const ReferenceTypeInfo& rhs);
275 // Control-flow graph of a method. Contains a list of basic blocks.
276 class HGraph : public ArenaObject<kArenaAllocGraph> {
278 HGraph(ArenaAllocator* arena,
279 const DexFile& dex_file,
281 bool should_generate_constructor_barrier,
282 InstructionSet instruction_set,
283 InvokeType invoke_type = kInvalidInvokeType,
284 bool debuggable = false,
286 int start_instruction_id = 0)
288 blocks_(arena->Adapter(kArenaAllocBlockList)),
289 reverse_post_order_(arena->Adapter(kArenaAllocReversePostOrder)),
290 linear_order_(arena->Adapter(kArenaAllocLinearOrder)),
291 entry_block_(nullptr),
292 exit_block_(nullptr),
293 maximum_number_of_out_vregs_(0),
295 number_of_in_vregs_(0),
296 temporaries_vreg_slots_(0),
297 has_bounds_checks_(false),
298 has_try_catch_(false),
299 has_irreducible_loops_(false),
300 debuggable_(debuggable),
301 current_instruction_id_(start_instruction_id),
303 method_idx_(method_idx),
304 invoke_type_(invoke_type),
306 should_generate_constructor_barrier_(should_generate_constructor_barrier),
307 instruction_set_(instruction_set),
308 cached_null_constant_(nullptr),
309 cached_int_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
310 cached_float_constants_(std::less<int32_t>(), arena->Adapter(kArenaAllocConstantsMap)),
311 cached_long_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
312 cached_double_constants_(std::less<int64_t>(), arena->Adapter(kArenaAllocConstantsMap)),
313 cached_current_method_(nullptr),
314 inexact_object_rti_(ReferenceTypeInfo::CreateInvalid()),
316 blocks_.reserve(kDefaultNumberOfBlocks);
319 // Acquires and stores RTI of inexact Object to be used when creating HNullConstant.
320 void InitializeInexactObjectRTI(StackHandleScopeCollection* handles);
322 ArenaAllocator* GetArena() const { return arena_; }
323 const ArenaVector<HBasicBlock*>& GetBlocks() const { return blocks_; }
325 bool IsInSsaForm() const { return in_ssa_form_; }
326 void SetInSsaForm() { in_ssa_form_ = true; }
328 HBasicBlock* GetEntryBlock() const { return entry_block_; }
329 HBasicBlock* GetExitBlock() const { return exit_block_; }
330 bool HasExitBlock() const { return exit_block_ != nullptr; }
332 void SetEntryBlock(HBasicBlock* block) { entry_block_ = block; }
333 void SetExitBlock(HBasicBlock* block) { exit_block_ = block; }
335 void AddBlock(HBasicBlock* block);
337 void ComputeDominanceInformation();
338 void ClearDominanceInformation();
339 void ClearLoopInformation();
340 void FindBackEdges(ArenaBitVector* visited);
341 GraphAnalysisResult BuildDominatorTree();
343 void SimplifyCatchBlocks();
345 // Analyze all natural loops in this graph. Returns a code specifying that it
346 // was successful or the reason for failure. The method will fail if a loop
347 // is a throw-catch loop, i.e. the header is a catch block.
348 GraphAnalysisResult AnalyzeLoops() const;
350 // Iterate over blocks to compute try block membership. Needs reverse post
351 // order and loop information.
352 void ComputeTryBlockInformation();
354 // Inline this graph in `outer_graph`, replacing the given `invoke` instruction.
355 // Returns the instruction to replace the invoke expression or null if the
356 // invoke is for a void method. Note that the caller is responsible for replacing
357 // and removing the invoke instruction.
358 HInstruction* InlineInto(HGraph* outer_graph, HInvoke* invoke);
360 // Update the loop and try membership of `block`, which was spawned from `reference`.
361 // In case `reference` is a back edge, `replace_if_back_edge` notifies whether `block`
362 // should be the new back edge.
363 void UpdateLoopAndTryInformationOfNewBlock(HBasicBlock* block,
364 HBasicBlock* reference,
365 bool replace_if_back_edge);
367 // Need to add a couple of blocks to test if the loop body is entered and
368 // put deoptimization instructions, etc.
369 void TransformLoopHeaderForBCE(HBasicBlock* header);
371 // Removes `block` from the graph. Assumes `block` has been disconnected from
372 // other blocks and has no instructions or phis.
373 void DeleteDeadEmptyBlock(HBasicBlock* block);
375 // Splits the edge between `block` and `successor` while preserving the
376 // indices in the predecessor/successor lists. If there are multiple edges
377 // between the blocks, the lowest indices are used.
378 // Returns the new block which is empty and has the same dex pc as `successor`.
379 HBasicBlock* SplitEdge(HBasicBlock* block, HBasicBlock* successor);
381 void SplitCriticalEdge(HBasicBlock* block, HBasicBlock* successor);
382 void SimplifyLoop(HBasicBlock* header);
384 int32_t GetNextInstructionId() {
385 DCHECK_NE(current_instruction_id_, INT32_MAX);
386 return current_instruction_id_++;
389 int32_t GetCurrentInstructionId() const {
390 return current_instruction_id_;
393 void SetCurrentInstructionId(int32_t id) {
394 DCHECK_GE(id, current_instruction_id_);
395 current_instruction_id_ = id;
398 uint16_t GetMaximumNumberOfOutVRegs() const {
399 return maximum_number_of_out_vregs_;
402 void SetMaximumNumberOfOutVRegs(uint16_t new_value) {
403 maximum_number_of_out_vregs_ = new_value;
406 void UpdateMaximumNumberOfOutVRegs(uint16_t other_value) {
407 maximum_number_of_out_vregs_ = std::max(maximum_number_of_out_vregs_, other_value);
410 void UpdateTemporariesVRegSlots(size_t slots) {
411 temporaries_vreg_slots_ = std::max(slots, temporaries_vreg_slots_);
414 size_t GetTemporariesVRegSlots() const {
415 DCHECK(!in_ssa_form_);
416 return temporaries_vreg_slots_;
419 void SetNumberOfVRegs(uint16_t number_of_vregs) {
420 number_of_vregs_ = number_of_vregs;
423 uint16_t GetNumberOfVRegs() const {
424 return number_of_vregs_;
427 void SetNumberOfInVRegs(uint16_t value) {
428 number_of_in_vregs_ = value;
431 uint16_t GetNumberOfInVRegs() const {
432 return number_of_in_vregs_;
435 uint16_t GetNumberOfLocalVRegs() const {
436 DCHECK(!in_ssa_form_);
437 return number_of_vregs_ - number_of_in_vregs_;
440 const ArenaVector<HBasicBlock*>& GetReversePostOrder() const {
441 return reverse_post_order_;
444 const ArenaVector<HBasicBlock*>& GetLinearOrder() const {
445 return linear_order_;
448 bool HasBoundsChecks() const {
449 return has_bounds_checks_;
452 void SetHasBoundsChecks(bool value) {
453 has_bounds_checks_ = value;
456 bool ShouldGenerateConstructorBarrier() const {
457 return should_generate_constructor_barrier_;
460 bool IsDebuggable() const { return debuggable_; }
462 // Returns a constant of the given type and value. If it does not exist
463 // already, it is created and inserted into the graph. This method is only for
465 HConstant* GetConstant(Primitive::Type type, int64_t value, uint32_t dex_pc = kNoDexPc);
467 // TODO: This is problematic for the consistency of reference type propagation
468 // because it can be created anytime after the pass and thus it will be left
469 // with an invalid type.
470 HNullConstant* GetNullConstant(uint32_t dex_pc = kNoDexPc);
472 HIntConstant* GetIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc) {
473 return CreateConstant(value, &cached_int_constants_, dex_pc);
475 HLongConstant* GetLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc) {
476 return CreateConstant(value, &cached_long_constants_, dex_pc);
478 HFloatConstant* GetFloatConstant(float value, uint32_t dex_pc = kNoDexPc) {
479 return CreateConstant(bit_cast<int32_t, float>(value), &cached_float_constants_, dex_pc);
481 HDoubleConstant* GetDoubleConstant(double value, uint32_t dex_pc = kNoDexPc) {
482 return CreateConstant(bit_cast<int64_t, double>(value), &cached_double_constants_, dex_pc);
485 HCurrentMethod* GetCurrentMethod();
487 const DexFile& GetDexFile() const {
491 uint32_t GetMethodIdx() const {
495 InvokeType GetInvokeType() const {
499 InstructionSet GetInstructionSet() const {
500 return instruction_set_;
503 bool IsCompilingOsr() const { return osr_; }
505 bool HasTryCatch() const { return has_try_catch_; }
506 void SetHasTryCatch(bool value) { has_try_catch_ = value; }
508 bool HasIrreducibleLoops() const { return has_irreducible_loops_; }
509 void SetHasIrreducibleLoops(bool value) { has_irreducible_loops_ = value; }
511 ArtMethod* GetArtMethod() const { return art_method_; }
512 void SetArtMethod(ArtMethod* method) { art_method_ = method; }
514 // Returns an instruction with the opposite boolean value from 'cond'.
515 // The instruction has been inserted into the graph, either as a constant, or
517 HInstruction* InsertOppositeCondition(HInstruction* cond, HInstruction* cursor);
519 ReferenceTypeInfo GetInexactObjectRti() const { return inexact_object_rti_; }
522 void RemoveInstructionsAsUsersFromDeadBlocks(const ArenaBitVector& visited) const;
523 void RemoveDeadBlocks(const ArenaBitVector& visited);
525 template <class InstructionType, typename ValueType>
526 InstructionType* CreateConstant(ValueType value,
527 ArenaSafeMap<ValueType, InstructionType*>* cache,
528 uint32_t dex_pc = kNoDexPc) {
529 // Try to find an existing constant of the given value.
530 InstructionType* constant = nullptr;
531 auto cached_constant = cache->find(value);
532 if (cached_constant != cache->end()) {
533 constant = cached_constant->second;
536 // If not found or previously deleted, create and cache a new instruction.
537 // Don't bother reviving a previously deleted instruction, for simplicity.
538 if (constant == nullptr || constant->GetBlock() == nullptr) {
539 constant = new (arena_) InstructionType(value, dex_pc);
540 cache->Overwrite(value, constant);
541 InsertConstant(constant);
546 void InsertConstant(HConstant* instruction);
548 // Cache a float constant into the graph. This method should only be
549 // called by the SsaBuilder when creating "equivalent" instructions.
550 void CacheFloatConstant(HFloatConstant* constant);
552 // See CacheFloatConstant comment.
553 void CacheDoubleConstant(HDoubleConstant* constant);
555 ArenaAllocator* const arena_;
557 // List of blocks in insertion order.
558 ArenaVector<HBasicBlock*> blocks_;
560 // List of blocks to perform a reverse post order tree traversal.
561 ArenaVector<HBasicBlock*> reverse_post_order_;
563 // List of blocks to perform a linear order tree traversal.
564 ArenaVector<HBasicBlock*> linear_order_;
566 HBasicBlock* entry_block_;
567 HBasicBlock* exit_block_;
569 // The maximum number of virtual registers arguments passed to a HInvoke in this graph.
570 uint16_t maximum_number_of_out_vregs_;
572 // The number of virtual registers in this method. Contains the parameters.
573 uint16_t number_of_vregs_;
575 // The number of virtual registers used by parameters of this method.
576 uint16_t number_of_in_vregs_;
578 // Number of vreg size slots that the temporaries use (used in baseline compiler).
579 size_t temporaries_vreg_slots_;
581 // Has bounds checks. We can totally skip BCE if it's false.
582 bool has_bounds_checks_;
584 // Flag whether there are any try/catch blocks in the graph. We will skip
585 // try/catch-related passes if false.
588 // Flag whether there are any irreducible loops in the graph.
589 bool has_irreducible_loops_;
591 // Indicates whether the graph should be compiled in a way that
592 // ensures full debuggability. If false, we can apply more
593 // aggressive optimizations that may limit the level of debugging.
594 const bool debuggable_;
596 // The current id to assign to a newly added instruction. See HInstruction.id_.
597 int32_t current_instruction_id_;
599 // The dex file from which the method is from.
600 const DexFile& dex_file_;
602 // The method index in the dex file.
603 const uint32_t method_idx_;
605 // If inlined, this encodes how the callee is being invoked.
606 const InvokeType invoke_type_;
608 // Whether the graph has been transformed to SSA form. Only used
609 // in debug mode to ensure we are not using properties only valid
610 // for non-SSA form (like the number of temporaries).
613 const bool should_generate_constructor_barrier_;
615 const InstructionSet instruction_set_;
618 HNullConstant* cached_null_constant_;
619 ArenaSafeMap<int32_t, HIntConstant*> cached_int_constants_;
620 ArenaSafeMap<int32_t, HFloatConstant*> cached_float_constants_;
621 ArenaSafeMap<int64_t, HLongConstant*> cached_long_constants_;
622 ArenaSafeMap<int64_t, HDoubleConstant*> cached_double_constants_;
624 HCurrentMethod* cached_current_method_;
626 // The ArtMethod this graph is for. Note that for AOT, it may be null,
627 // for example for methods whose declaring class could not be resolved
628 // (such as when the superclass could not be found).
629 ArtMethod* art_method_;
631 // Keep the RTI of inexact Object to avoid having to pass stack handle
632 // collection pointer to passes which may create NullConstant.
633 ReferenceTypeInfo inexact_object_rti_;
635 // Whether we are compiling this graph for on stack replacement: this will
636 // make all loops seen as irreducible and emit special stack maps to mark
637 // compiled code entries which the interpreter can directly jump to.
640 friend class SsaBuilder; // For caching constants.
641 friend class SsaLivenessAnalysis; // For the linear order.
642 friend class HInliner; // For the reverse post order.
643 ART_FRIEND_TEST(GraphTest, IfSuccessorSimpleJoinBlock1);
644 DISALLOW_COPY_AND_ASSIGN(HGraph);
647 class HLoopInformation : public ArenaObject<kArenaAllocLoopInfo> {
649 HLoopInformation(HBasicBlock* header, HGraph* graph)
651 suspend_check_(nullptr),
653 contains_irreducible_loop_(false),
654 back_edges_(graph->GetArena()->Adapter(kArenaAllocLoopInfoBackEdges)),
655 // Make bit vector growable, as the number of blocks may change.
656 blocks_(graph->GetArena(), graph->GetBlocks().size(), true, kArenaAllocLoopInfoBackEdges) {
657 back_edges_.reserve(kDefaultNumberOfBackEdges);
660 bool IsIrreducible() const { return irreducible_; }
661 bool ContainsIrreducibleLoop() const { return contains_irreducible_loop_; }
663 void Dump(std::ostream& os);
665 HBasicBlock* GetHeader() const {
669 void SetHeader(HBasicBlock* block) {
673 HSuspendCheck* GetSuspendCheck() const { return suspend_check_; }
674 void SetSuspendCheck(HSuspendCheck* check) { suspend_check_ = check; }
675 bool HasSuspendCheck() const { return suspend_check_ != nullptr; }
677 void AddBackEdge(HBasicBlock* back_edge) {
678 back_edges_.push_back(back_edge);
681 void RemoveBackEdge(HBasicBlock* back_edge) {
682 RemoveElement(back_edges_, back_edge);
685 bool IsBackEdge(const HBasicBlock& block) const {
686 return ContainsElement(back_edges_, &block);
689 size_t NumberOfBackEdges() const {
690 return back_edges_.size();
693 HBasicBlock* GetPreHeader() const;
695 const ArenaVector<HBasicBlock*>& GetBackEdges() const {
699 // Returns the lifetime position of the back edge that has the
700 // greatest lifetime position.
701 size_t GetLifetimeEnd() const;
703 void ReplaceBackEdge(HBasicBlock* existing, HBasicBlock* new_back_edge) {
704 ReplaceElement(back_edges_, existing, new_back_edge);
707 // Finds blocks that are part of this loop.
710 // Returns whether this loop information contains `block`.
711 // Note that this loop information *must* be populated before entering this function.
712 bool Contains(const HBasicBlock& block) const;
714 // Returns whether this loop information is an inner loop of `other`.
715 // Note that `other` *must* be populated before entering this function.
716 bool IsIn(const HLoopInformation& other) const;
718 // Returns true if instruction is not defined within this loop.
719 bool IsDefinedOutOfTheLoop(HInstruction* instruction) const;
721 const ArenaBitVector& GetBlocks() const { return blocks_; }
723 void Add(HBasicBlock* block);
724 void Remove(HBasicBlock* block);
726 void ClearAllBlocks() {
727 blocks_.ClearAllBits();
730 bool HasBackEdgeNotDominatedByHeader() const;
732 bool IsPopulated() const {
733 return blocks_.GetHighestBitSet() != -1;
737 // Internal recursive implementation of `Populate`.
738 void PopulateRecursive(HBasicBlock* block);
739 void PopulateIrreducibleRecursive(HBasicBlock* block, ArenaBitVector* finalized);
741 HBasicBlock* header_;
742 HSuspendCheck* suspend_check_;
744 bool contains_irreducible_loop_;
745 ArenaVector<HBasicBlock*> back_edges_;
746 ArenaBitVector blocks_;
748 DISALLOW_COPY_AND_ASSIGN(HLoopInformation);
751 // Stores try/catch information for basic blocks.
752 // Note that HGraph is constructed so that catch blocks cannot simultaneously
754 class TryCatchInformation : public ArenaObject<kArenaAllocTryCatchInfo> {
756 // Try block information constructor.
757 explicit TryCatchInformation(const HTryBoundary& try_entry)
758 : try_entry_(&try_entry),
759 catch_dex_file_(nullptr),
760 catch_type_index_(DexFile::kDexNoIndex16) {
761 DCHECK(try_entry_ != nullptr);
764 // Catch block information constructor.
765 TryCatchInformation(uint16_t catch_type_index, const DexFile& dex_file)
766 : try_entry_(nullptr),
767 catch_dex_file_(&dex_file),
768 catch_type_index_(catch_type_index) {}
770 bool IsTryBlock() const { return try_entry_ != nullptr; }
772 const HTryBoundary& GetTryEntry() const {
773 DCHECK(IsTryBlock());
777 bool IsCatchBlock() const { return catch_dex_file_ != nullptr; }
779 bool IsCatchAllTypeIndex() const {
780 DCHECK(IsCatchBlock());
781 return catch_type_index_ == DexFile::kDexNoIndex16;
784 uint16_t GetCatchTypeIndex() const {
785 DCHECK(IsCatchBlock());
786 return catch_type_index_;
789 const DexFile& GetCatchDexFile() const {
790 DCHECK(IsCatchBlock());
791 return *catch_dex_file_;
795 // One of possibly several TryBoundary instructions entering the block's try.
796 // Only set for try blocks.
797 const HTryBoundary* try_entry_;
799 // Exception type information. Only set for catch blocks.
800 const DexFile* catch_dex_file_;
801 const uint16_t catch_type_index_;
// Sentinel values for HBasicBlock: "no lifetime position assigned" and
// "no block id assigned" (all-ones bit patterns).
static constexpr size_t kNoLifetime = -1;
static constexpr uint32_t kInvalidBlockId = static_cast<uint32_t>(-1);
807 // A block in a method. Contains the list of instructions represented
808 // as a double linked list. Each block knows its predecessors and
811 class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
813 HBasicBlock(HGraph* graph, uint32_t dex_pc = kNoDexPc)
815 predecessors_(graph->GetArena()->Adapter(kArenaAllocPredecessors)),
816 successors_(graph->GetArena()->Adapter(kArenaAllocSuccessors)),
817 loop_information_(nullptr),
819 dominated_blocks_(graph->GetArena()->Adapter(kArenaAllocDominated)),
820 block_id_(kInvalidBlockId),
822 lifetime_start_(kNoLifetime),
823 lifetime_end_(kNoLifetime),
824 try_catch_information_(nullptr) {
825 predecessors_.reserve(kDefaultNumberOfPredecessors);
826 successors_.reserve(kDefaultNumberOfSuccessors);
827 dominated_blocks_.reserve(kDefaultNumberOfDominatedBlocks);
830 const ArenaVector<HBasicBlock*>& GetPredecessors() const {
831 return predecessors_;
834 const ArenaVector<HBasicBlock*>& GetSuccessors() const {
838 ArrayRef<HBasicBlock* const> GetNormalSuccessors() const;
839 ArrayRef<HBasicBlock* const> GetExceptionalSuccessors() const;
841 bool HasSuccessor(const HBasicBlock* block, size_t start_from = 0u) {
842 return ContainsElement(successors_, block, start_from);
845 const ArenaVector<HBasicBlock*>& GetDominatedBlocks() const {
846 return dominated_blocks_;
849 bool IsEntryBlock() const {
850 return graph_->GetEntryBlock() == this;
853 bool IsExitBlock() const {
854 return graph_->GetExitBlock() == this;
857 bool IsSingleGoto() const;
858 bool IsSingleTryBoundary() const;
860 // Returns true if this block emits nothing but a jump.
861 bool IsSingleJump() const {
862 HLoopInformation* loop_info = GetLoopInformation();
863 return (IsSingleGoto() || IsSingleTryBoundary())
864 // Back edges generate a suspend check.
865 && (loop_info == nullptr || !loop_info->IsBackEdge(*this));
868 void AddBackEdge(HBasicBlock* back_edge) {
869 if (loop_information_ == nullptr) {
870 loop_information_ = new (graph_->GetArena()) HLoopInformation(this, graph_);
872 DCHECK_EQ(loop_information_->GetHeader(), this);
873 loop_information_->AddBackEdge(back_edge);
876 HGraph* GetGraph() const { return graph_; }
877 void SetGraph(HGraph* graph) { graph_ = graph; }
879 uint32_t GetBlockId() const { return block_id_; }
880 void SetBlockId(int id) { block_id_ = id; }
881 uint32_t GetDexPc() const { return dex_pc_; }
883 HBasicBlock* GetDominator() const { return dominator_; }
884 void SetDominator(HBasicBlock* dominator) { dominator_ = dominator; }
885 void AddDominatedBlock(HBasicBlock* block) { dominated_blocks_.push_back(block); }
887 void RemoveDominatedBlock(HBasicBlock* block) {
888 RemoveElement(dominated_blocks_, block);
891 void ReplaceDominatedBlock(HBasicBlock* existing, HBasicBlock* new_block) {
892 ReplaceElement(dominated_blocks_, existing, new_block);
895 void ClearDominanceInformation();
897 int NumberOfBackEdges() const {
898 return IsLoopHeader() ? loop_information_->NumberOfBackEdges() : 0;
901 HInstruction* GetFirstInstruction() const { return instructions_.first_instruction_; }
902 HInstruction* GetLastInstruction() const { return instructions_.last_instruction_; }
903 const HInstructionList& GetInstructions() const { return instructions_; }
904 HInstruction* GetFirstPhi() const { return phis_.first_instruction_; }
905 HInstruction* GetLastPhi() const { return phis_.last_instruction_; }
906 const HInstructionList& GetPhis() const { return phis_; }
908 HInstruction* GetFirstInstructionDisregardMoves() const;
910 void AddSuccessor(HBasicBlock* block) {
911 successors_.push_back(block);
912 block->predecessors_.push_back(this);
915 void ReplaceSuccessor(HBasicBlock* existing, HBasicBlock* new_block) {
916 size_t successor_index = GetSuccessorIndexOf(existing);
917 existing->RemovePredecessor(this);
918 new_block->predecessors_.push_back(this);
919 successors_[successor_index] = new_block;
922 void ReplacePredecessor(HBasicBlock* existing, HBasicBlock* new_block) {
923 size_t predecessor_index = GetPredecessorIndexOf(existing);
924 existing->RemoveSuccessor(this);
925 new_block->successors_.push_back(this);
926 predecessors_[predecessor_index] = new_block;
929 // Insert `this` between `predecessor` and `successor. This method
930 // preserves the indicies, and will update the first edge found between
931 // `predecessor` and `successor`.
932 void InsertBetween(HBasicBlock* predecessor, HBasicBlock* successor) {
933 size_t predecessor_index = successor->GetPredecessorIndexOf(predecessor);
934 size_t successor_index = predecessor->GetSuccessorIndexOf(successor);
935 successor->predecessors_[predecessor_index] = this;
936 predecessor->successors_[successor_index] = this;
937 successors_.push_back(successor);
938 predecessors_.push_back(predecessor);
941 void RemovePredecessor(HBasicBlock* block) {
942 predecessors_.erase(predecessors_.begin() + GetPredecessorIndexOf(block));
945 void RemoveSuccessor(HBasicBlock* block) {
946 successors_.erase(successors_.begin() + GetSuccessorIndexOf(block));
949 void ClearAllPredecessors() {
950 predecessors_.clear();
953 void AddPredecessor(HBasicBlock* block) {
954 predecessors_.push_back(block);
955 block->successors_.push_back(this);
958 void SwapPredecessors() {
959 DCHECK_EQ(predecessors_.size(), 2u);
960 std::swap(predecessors_[0], predecessors_[1]);
963 void SwapSuccessors() {
964 DCHECK_EQ(successors_.size(), 2u);
965 std::swap(successors_[0], successors_[1]);
968 size_t GetPredecessorIndexOf(HBasicBlock* predecessor) const {
969 return IndexOfElement(predecessors_, predecessor);
972 size_t GetSuccessorIndexOf(HBasicBlock* successor) const {
973 return IndexOfElement(successors_, successor);
976 HBasicBlock* GetSinglePredecessor() const {
977 DCHECK_EQ(GetPredecessors().size(), 1u);
978 return GetPredecessors()[0];
981 HBasicBlock* GetSingleSuccessor() const {
982 DCHECK_EQ(GetSuccessors().size(), 1u);
983 return GetSuccessors()[0];
986 // Returns whether the first occurrence of `predecessor` in the list of
987 // predecessors is at index `idx`.
988 bool IsFirstIndexOfPredecessor(HBasicBlock* predecessor, size_t idx) const {
989 DCHECK_EQ(GetPredecessors()[idx], predecessor);
990 return GetPredecessorIndexOf(predecessor) == idx;
993 // Create a new block between this block and its predecessors. The new block
994 // is added to the graph, all predecessor edges are relinked to it and an edge
995 // is created to `this`. Returns the new empty block. Reverse post order or
996 // loop and try/catch information are not updated.
997 HBasicBlock* CreateImmediateDominator();
999 // Split the block into two blocks just before `cursor`. Returns the newly
1000 // created, latter block. Note that this method will add the block to the
1001 // graph, create a Goto at the end of the former block and will create an edge
1002 // between the blocks. It will not, however, update the reverse post order or
1003 // loop and try/catch information.
1004 HBasicBlock* SplitBefore(HInstruction* cursor);
1006 // Split the block into two blocks just before `cursor`. Returns the newly
1007 // created block. Note that this method just updates raw block information,
1008 // like predecessors, successors, dominators, and instruction list. It does not
1009 // update the graph, reverse post order, loop information, nor make sure the
1010 // blocks are consistent (for example ending with a control flow instruction).
1011 HBasicBlock* SplitBeforeForInlining(HInstruction* cursor);
1013 // Similar to `SplitBeforeForInlining` but does it after `cursor`.
1014 HBasicBlock* SplitAfterForInlining(HInstruction* cursor);
1016 // Merge `other` at the end of `this`. Successors and dominated blocks of
1017 // `other` are changed to be successors and dominated blocks of `this`. Note
1018 // that this method does not update the graph, reverse post order, loop
1019 // information, nor make sure the blocks are consistent (for example ending
1020 // with a control flow instruction).
1021 void MergeWithInlined(HBasicBlock* other);
1023 // Replace `this` with `other`. Predecessors, successors, and dominated blocks
1024 // of `this` are moved to `other`.
1025 // Note that this method does not update the graph, reverse post order, loop
1026 // information, nor make sure the blocks are consistent (for example ending
1027 // with a control flow instruction).
1028 void ReplaceWith(HBasicBlock* other);
1030 // Merge `other` at the end of `this`. This method updates loops, reverse post
1031 // order, links to predecessors, successors, dominators and deletes the block
1032 // from the graph. The two blocks must be successive, i.e. `this` the only
1033 // predecessor of `other` and vice versa.
1034 void MergeWith(HBasicBlock* other);
1036 // Disconnects `this` from all its predecessors, successors and dominator,
1037 // removes it from all loops it is included in and eventually from the graph.
1038 // The block must not dominate any other block. Predecessors and successors
1039 // are safely updated.
1040 void DisconnectAndDelete();
1042 void AddInstruction(HInstruction* instruction);
1043 // Insert `instruction` before/after an existing instruction `cursor`.
1044 void InsertInstructionBefore(HInstruction* instruction, HInstruction* cursor);
1045 void InsertInstructionAfter(HInstruction* instruction, HInstruction* cursor);
1046 // Replace instruction `initial` with `replacement` within this block.
1047 void ReplaceAndRemoveInstructionWith(HInstruction* initial,
1048 HInstruction* replacement);
1049 void MoveInstructionBefore(HInstruction* insn, HInstruction* cursor);
1050 void AddPhi(HPhi* phi);
1051 void InsertPhiAfter(HPhi* instruction, HPhi* cursor);
1052 // RemoveInstruction and RemovePhi delete a given instruction from the respective
1053 // instruction list. With 'ensure_safety' set to true, it verifies that the
1054 // instruction is not in use and removes it from the use lists of its inputs.
1055 void RemoveInstruction(HInstruction* instruction, bool ensure_safety = true);
1056 void RemovePhi(HPhi* phi, bool ensure_safety = true);
1057 void RemoveInstructionOrPhi(HInstruction* instruction, bool ensure_safety = true);
1059 // Returns true if this block is the header of the loop it belongs to.
1059 bool IsLoopHeader() const {
1060 return IsInLoop() && (loop_information_->GetHeader() == this);
1063 // Returns true if predecessor 0 of this loop header is the loop's
1063 // pre-header. Must only be called on a loop header (see DCHECK).
1063 bool IsLoopPreHeaderFirstPredecessor() const {
1064 DCHECK(IsLoopHeader());
1065 return GetPredecessors()[0] == GetLoopInformation()->GetPreHeader();
1068 // Returns true if predecessor 0 of this loop header is a back edge of the
1068 // loop. Must only be called on a loop header (see DCHECK).
1068 bool IsFirstPredecessorBackEdge() const {
1069 DCHECK(IsLoopHeader());
1070 return GetLoopInformation()->IsBackEdge(*GetPredecessors()[0]);
1073 HLoopInformation* GetLoopInformation() const {
1074 return loop_information_;
1077 // Set the loop_information_ on this block. Overrides the current
1078 // loop_information_ if it is an outer loop of the passed loop information.
1079 // Note that this method is called while creating the loop information.
1080 void SetInLoop(HLoopInformation* info) {
1081 if (IsLoopHeader()) {
1082 // Nothing to do. This just means `info` is an outer loop.
1083 } else if (!IsInLoop()) {
1084 loop_information_ = info;
1085 } else if (loop_information_->Contains(*info->GetHeader())) {
1086 // Block is currently part of an outer loop. Make it part of this inner loop.
1087 // Note that a non-loop-header block having loop information means that loop
1088 // information has already been populated.
1089 loop_information_ = info;
1091 // Block is part of an inner loop. Do not update the loop information.
1092 // Note that we cannot do the check `info->Contains(loop_information_)->GetHeader()`
1093 // at this point, because this method is being called while populating `info`.
1097 // Raw update of the loop information. Unlike SetInLoop(), performs no
1097 // inner/outer-loop reasoning: unconditionally overwrites loop_information_.
1098 void SetLoopInformation(HLoopInformation* info) {
1099 loop_information_ = info;
1102 // A block is "in a loop" iff it has loop information attached.
1102 bool IsInLoop() const { return loop_information_ != nullptr; }
1104 TryCatchInformation* GetTryCatchInformation() const { return try_catch_information_; }
1106 void SetTryCatchInformation(TryCatchInformation* try_catch_information) {
1107 try_catch_information_ = try_catch_information;
1110 // Returns true if this block has try/catch information marking it as
1110 // covered by a try.
1110 bool IsTryBlock() const {
1111 return try_catch_information_ != nullptr && try_catch_information_->IsTryBlock();
1114 // Returns true if this block has try/catch information marking it as a
1114 // catch handler entry.
1114 bool IsCatchBlock() const {
1115 return try_catch_information_ != nullptr && try_catch_information_->IsCatchBlock();
1118 // Returns the try entry that this block's successors should have. They will
1119 // be in the same try, unless the block ends in a try boundary. In that case,
1120 // the appropriate try entry will be returned.
1121 const HTryBoundary* ComputeTryEntryOfSuccessors() const;
1123 bool HasThrowingInstructions() const;
1125 // Returns whether this block dominates the block passed as parameter.
1126 bool Dominates(HBasicBlock* block) const;
1128 size_t GetLifetimeStart() const { return lifetime_start_; }
1129 size_t GetLifetimeEnd() const { return lifetime_end_; }
1131 void SetLifetimeStart(size_t start) { lifetime_start_ = start; }
1132 void SetLifetimeEnd(size_t end) { lifetime_end_ = end; }
1134 bool EndsWithControlFlowInstruction() const;
1135 bool EndsWithIf() const;
1136 bool EndsWithTryBoundary() const;
1137 bool HasSinglePhi() const;
1141 ArenaVector<HBasicBlock*> predecessors_;
1142 ArenaVector<HBasicBlock*> successors_;
1143 HInstructionList instructions_;
1144 HInstructionList phis_;
1145 HLoopInformation* loop_information_;
1146 HBasicBlock* dominator_;
1147 ArenaVector<HBasicBlock*> dominated_blocks_;
1149 // The dex program counter of the first instruction of this block.
1150 const uint32_t dex_pc_;
1151 size_t lifetime_start_;
1152 size_t lifetime_end_;
1153 TryCatchInformation* try_catch_information_;
1155 friend class HGraph;
1156 friend class HInstruction;
1158 DISALLOW_COPY_AND_ASSIGN(HBasicBlock);
1161 // Iterates over the LoopInformation of all loops which contain 'block'
1162 // from the innermost to the outermost.
1163 class HLoopInformationOutwardIterator : public ValueObject {
1165 explicit HLoopInformationOutwardIterator(const HBasicBlock& block)
1166 : current_(block.GetLoopInformation()) {}
1167 
1167 // Iteration ends once we have walked out of the outermost loop.
1168 bool Done() const { return current_ == nullptr; }
1171 
1171 // Step outward: the current loop's pre-header lives in the enclosing loop
1171 // (or in no loop at all, which terminates the iteration).
1172 current_ = current_->GetPreHeader()->GetLoopInformation();
1175 HLoopInformation* Current() const {
1180 
1180 // Loop information of the loop currently being visited; nullptr when done.
1181 HLoopInformation* current_;
1183 DISALLOW_COPY_AND_ASSIGN(HLoopInformationOutwardIterator);
1186 #define FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
1187 M(Above, Condition) \
1188 M(AboveOrEqual, Condition) \
1189 M(Add, BinaryOperation) \
1190 M(And, BinaryOperation) \
1191 M(ArrayGet, Instruction) \
1192 M(ArrayLength, Instruction) \
1193 M(ArraySet, Instruction) \
1194 M(Below, Condition) \
1195 M(BelowOrEqual, Condition) \
1196 M(BooleanNot, UnaryOperation) \
1197 M(BoundsCheck, Instruction) \
1198 M(BoundType, Instruction) \
1199 M(CheckCast, Instruction) \
1200 M(ClassTableGet, Instruction) \
1201 M(ClearException, Instruction) \
1202 M(ClinitCheck, Instruction) \
1203 M(Compare, BinaryOperation) \
1204 M(CurrentMethod, Instruction) \
1205 M(Deoptimize, Instruction) \
1206 M(Div, BinaryOperation) \
1207 M(DivZeroCheck, Instruction) \
1208 M(DoubleConstant, Constant) \
1209 M(Equal, Condition) \
1210 M(Exit, Instruction) \
1211 M(FloatConstant, Constant) \
1212 M(Goto, Instruction) \
1213 M(GreaterThan, Condition) \
1214 M(GreaterThanOrEqual, Condition) \
1215 M(If, Instruction) \
1216 M(InstanceFieldGet, Instruction) \
1217 M(InstanceFieldSet, Instruction) \
1218 M(InstanceOf, Instruction) \
1219 M(IntConstant, Constant) \
1220 M(InvokeUnresolved, Invoke) \
1221 M(InvokeInterface, Invoke) \
1222 M(InvokeStaticOrDirect, Invoke) \
1223 M(InvokeVirtual, Invoke) \
1224 M(LessThan, Condition) \
1225 M(LessThanOrEqual, Condition) \
1226 M(LoadClass, Instruction) \
1227 M(LoadException, Instruction) \
1228 M(LoadString, Instruction) \
1229 M(LongConstant, Constant) \
1230 M(MemoryBarrier, Instruction) \
1231 M(MonitorOperation, Instruction) \
1232 M(Mul, BinaryOperation) \
1233 M(NativeDebugInfo, Instruction) \
1234 M(Neg, UnaryOperation) \
1235 M(NewArray, Instruction) \
1236 M(NewInstance, Instruction) \
1237 M(Not, UnaryOperation) \
1238 M(NotEqual, Condition) \
1239 M(NullConstant, Instruction) \
1240 M(NullCheck, Instruction) \
1241 M(Or, BinaryOperation) \
1242 M(PackedSwitch, Instruction) \
1243 M(ParallelMove, Instruction) \
1244 M(ParameterValue, Instruction) \
1245 M(Phi, Instruction) \
1246 M(Rem, BinaryOperation) \
1247 M(Return, Instruction) \
1248 M(ReturnVoid, Instruction) \
1249 M(Ror, BinaryOperation) \
1250 M(Shl, BinaryOperation) \
1251 M(Shr, BinaryOperation) \
1252 M(StaticFieldGet, Instruction) \
1253 M(StaticFieldSet, Instruction) \
1254 M(UnresolvedInstanceFieldGet, Instruction) \
1255 M(UnresolvedInstanceFieldSet, Instruction) \
1256 M(UnresolvedStaticFieldGet, Instruction) \
1257 M(UnresolvedStaticFieldSet, Instruction) \
1258 M(Select, Instruction) \
1259 M(Sub, BinaryOperation) \
1260 M(SuspendCheck, Instruction) \
1261 M(Throw, Instruction) \
1262 M(TryBoundary, Instruction) \
1263 M(TypeConversion, Instruction) \
1264 M(UShr, BinaryOperation) \
1265 M(Xor, BinaryOperation) \
1268 * Instructions, shared across several (not all) architectures.
1270 #if !defined(ART_ENABLE_CODEGEN_arm) && !defined(ART_ENABLE_CODEGEN_arm64)
1271 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M)
1273 #define FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1274 M(BitwiseNegatedRight, Instruction) \
1275 M(MultiplyAccumulate, Instruction)
1278 #ifndef ART_ENABLE_CODEGEN_arm
1279 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M)
1281 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
1282 M(ArmDexCacheArraysBase, Instruction)
1285 #ifndef ART_ENABLE_CODEGEN_arm64
1286 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M)
1288 #define FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
1289 M(Arm64DataProcWithShifterOp, Instruction) \
1290 M(Arm64IntermediateAddress, Instruction)
1293 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M)
1295 #define FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M)
1297 #ifndef ART_ENABLE_CODEGEN_x86
1298 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M)
1300 #define FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
1301 M(X86ComputeBaseMethodAddress, Instruction) \
1302 M(X86LoadFromConstantTable, Instruction) \
1303 M(X86FPNeg, Instruction) \
1304 M(X86PackedSwitch, Instruction)
1307 #define FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1309 #define FOR_EACH_CONCRETE_INSTRUCTION(M) \
1310 FOR_EACH_CONCRETE_INSTRUCTION_COMMON(M) \
1311 FOR_EACH_CONCRETE_INSTRUCTION_SHARED(M) \
1312 FOR_EACH_CONCRETE_INSTRUCTION_ARM(M) \
1313 FOR_EACH_CONCRETE_INSTRUCTION_ARM64(M) \
1314 FOR_EACH_CONCRETE_INSTRUCTION_MIPS(M) \
1315 FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(M) \
1316 FOR_EACH_CONCRETE_INSTRUCTION_X86(M) \
1317 FOR_EACH_CONCRETE_INSTRUCTION_X86_64(M)
1319 #define FOR_EACH_ABSTRACT_INSTRUCTION(M) \
1320 M(Condition, BinaryOperation) \
1321 M(Constant, Instruction) \
1322 M(UnaryOperation, Instruction) \
1323 M(BinaryOperation, Instruction) \
1324 M(Invoke, Instruction)
1326 #define FOR_EACH_INSTRUCTION(M) \
1327 FOR_EACH_CONCRETE_INSTRUCTION(M) \
1328 FOR_EACH_ABSTRACT_INSTRUCTION(M)
1330 #define FORWARD_DECLARATION(type, super) class H##type;
1331 FOR_EACH_INSTRUCTION(FORWARD_DECLARATION)
1332 #undef FORWARD_DECLARATION
1334 #define DECLARE_INSTRUCTION(type) \
1335 InstructionKind GetKindInternal() const OVERRIDE { return k##type; } \
1336 const char* DebugName() const OVERRIDE { return #type; } \
1337 bool InstructionTypeEquals(HInstruction* other) const OVERRIDE { \
1338 return other->Is##type(); \
1340 void Accept(HGraphVisitor* visitor) OVERRIDE
1342 #define DECLARE_ABSTRACT_INSTRUCTION(type) \
1343 bool Is##type() const { return As##type() != nullptr; } \
1344 const H##type* As##type() const { return this; } \
1345 H##type* As##type() { return this; }
1346 // A node in an instruction's (or environment's) use list, recording which
1346 // user references the instruction and at which input index. Arena-allocated;
1346 // linked through `hook` by IntrusiveForwardList.
1347 template <typename T>
1348 class HUseListNode : public ArenaObject<kArenaAllocUseListNode> {
1350 T GetUser() const { return user_; }
1351 size_t GetIndex() const { return index_; }
1352 void SetIndex(size_t index) { index_ = index; }
1354 // Hook for the IntrusiveForwardList<>.
1355 // TODO: Hide this better.
1356 IntrusiveForwardListHook hook;
1359 // Constructor is private; only HInstruction (a friend) creates nodes.
1359 HUseListNode(T user, size_t index)
1360 : user_(user), index_(index) {}
1365 friend class HInstruction;
1367 DISALLOW_COPY_AND_ASSIGN(HUseListNode);
1370 template <typename T>
1371 using HUseList = IntrusiveForwardList<HUseListNode<T>>;
1373 // This class is used by HEnvironment and HInstruction classes to record the
1374 // instructions they use and pointers to the corresponding HUseListNodes kept
1375 // by the used instructions.
1376 template <typename T>
1377 class HUserRecord : public ValueObject {
1379 HUserRecord() : instruction_(nullptr), before_use_node_() {}
1380 explicit HUserRecord(HInstruction* instruction) : instruction_(instruction), before_use_node_() {}
1382 // Rebuilds a record for the same instruction with an updated use-list position.
1382 HUserRecord(const HUserRecord<T>& old_record, typename HUseList<T>::iterator before_use_node)
1383 : HUserRecord(old_record.instruction_, before_use_node) {}
1384 HUserRecord(HInstruction* instruction, typename HUseList<T>::iterator before_use_node)
1385 : instruction_(instruction), before_use_node_(before_use_node) {
1386 DCHECK(instruction_ != nullptr);
1389 HInstruction* GetInstruction() const { return instruction_; }
1390 typename HUseList<T>::iterator GetBeforeUseNode() const { return before_use_node_; }
1391 // The use node itself is the element right after `before_use_node_`.
1391 typename HUseList<T>::iterator GetUseNode() const { return ++GetBeforeUseNode(); }
1394 // Instruction used by the user.
1395 HInstruction* instruction_;
1397 // Iterator before the corresponding entry in the use list kept by 'instruction_'.
1398 typename HUseList<T>::iterator before_use_node_;
1402 * Side-effects representation.
1404 * For write/read dependences on fields/arrays, the dependence analysis uses
1405 * type disambiguation (e.g. a float field write cannot modify the value of an
1406 * integer field read) and the access type (e.g. a reference array write cannot
1407 * modify the value of a reference field read [although it may modify the
1408 * reference fetch prior to reading the field, which is represented by its own
1409 * write/read dependence]). The analysis makes conservative points-to
1410 * assumptions on reference types (e.g. two same typed arrays are assumed to be
1411 * the same, and any reference read depends on any reference read without
1412 * further regard of its type).
1414 * The internal representation uses 38 bits and is described in the table below.
1415 * The first line indicates the side effect, and for field/array accesses the
1416 * second line indicates the type of the access (in the order of the
1417 * Primitive::Type enum).
1418 * The two numbered lines below indicate the bit position in the bitfield (read
1421 * |Depends on GC|ARRAY-R |FIELD-R |Can trigger GC|ARRAY-W |FIELD-W |
1422 * +-------------+---------+---------+--------------+---------+---------+
1423 * | |DFJISCBZL|DFJISCBZL| |DFJISCBZL|DFJISCBZL|
1424 * | 3 |333333322|222222221| 1 |111111110|000000000|
1425 * | 7 |654321098|765432109| 8 |765432109|876543210|
1427 * Note that, to ease the implementation, 'changes' bits are least significant
1428 * bits, while 'dependency' bits are most significant bits.
1430 class SideEffects : public ValueObject {
1432 SideEffects() : flags_(0) {}
1434 static SideEffects None() {
1435 return SideEffects(0);
1438 static SideEffects All() {
1439 return SideEffects(kAllChangeBits | kAllDependOnBits);
1442 static SideEffects AllChanges() {
1443 return SideEffects(kAllChangeBits);
1446 static SideEffects AllDependencies() {
1447 return SideEffects(kAllDependOnBits);
1450 static SideEffects AllExceptGCDependency() {
1451 return AllWritesAndReads().Union(SideEffects::CanTriggerGC());
1454 static SideEffects AllWritesAndReads() {
1455 return SideEffects(kAllWrites | kAllReads);
1458 static SideEffects AllWrites() {
1459 return SideEffects(kAllWrites);
1462 static SideEffects AllReads() {
1463 return SideEffects(kAllReads);
1466 static SideEffects FieldWriteOfType(Primitive::Type type, bool is_volatile) {
1468 ? AllWritesAndReads()
1469 : SideEffects(TypeFlag(type, kFieldWriteOffset));
1472 static SideEffects ArrayWriteOfType(Primitive::Type type) {
1473 return SideEffects(TypeFlag(type, kArrayWriteOffset));
1476 static SideEffects FieldReadOfType(Primitive::Type type, bool is_volatile) {
1478 ? AllWritesAndReads()
1479 : SideEffects(TypeFlag(type, kFieldReadOffset));
1482 static SideEffects ArrayReadOfType(Primitive::Type type) {
1483 return SideEffects(TypeFlag(type, kArrayReadOffset));
1486 static SideEffects CanTriggerGC() {
1487 return SideEffects(1ULL << kCanTriggerGCBit);
1490 static SideEffects DependsOnGC() {
1491 return SideEffects(1ULL << kDependsOnGCBit);
1494 // Combines the side-effects of this and the other.
1495 SideEffects Union(SideEffects other) const {
1496 return SideEffects(flags_ | other.flags_);
1498 
1498 // Returns this set of side-effects with `other`'s bits removed.
1499 SideEffects Exclusion(SideEffects other) const {
1500 return SideEffects(flags_ & ~other.flags_);
1502 
1502 // In-place union with `other`.
1503 void Add(SideEffects other) {
1504 flags_ |= other.flags_;
1506 
1506 // Returns true if every bit set in `other` is also set in this.
1507 bool Includes(SideEffects other) const {
1508 return (other.flags_ & flags_) == other.flags_;
1511 bool HasSideEffects() const {
1512 return (flags_ & kAllChangeBits);
1515 bool HasDependencies() const {
1516 return (flags_ & kAllDependOnBits);
1519 // Returns true if there are no side effects or dependencies.
1520 bool DoesNothing() const {
1524 // Returns true if something is written.
1525 bool DoesAnyWrite() const {
1526 return (flags_ & kAllWrites);
1529 // Returns true if something is read.
1530 bool DoesAnyRead() const {
1531 return (flags_ & kAllReads);
1534 // Returns true if potentially everything is written and read
1535 // (every type and every kind of access).
1536 bool DoesAllReadWrite() const {
1537 return (flags_ & (kAllWrites | kAllReads)) == (kAllWrites | kAllReads);
1540 bool DoesAll() const {
1541 return flags_ == (kAllChangeBits | kAllDependOnBits);
1544 // Returns true if `this` may read something written by `other`.
1545 bool MayDependOn(SideEffects other) const {
1546 // Shift our 'depends on' bits down by kChangeBits so that they line up
1546 // with `other`'s 'change' bits, then intersect.
1546 const uint64_t depends_on_flags = (flags_ & kAllDependOnBits) >> kChangeBits;
1547 return (other.flags_ & depends_on_flags);
1550 // Returns string representation of flags (for debugging only).
1551 // Format: |x|DFJISCBZL|DFJISCBZL|y|DFJISCBZL|DFJISCBZL|
1552 std::string ToString() const {
1553 std::string flags = "|";
1554 for (int s = kLastBit; s >= 0; s--) {
1555 bool current_bit_is_set = ((flags_ >> s) & 1) != 0;
1556 if ((s == kDependsOnGCBit) || (s == kCanTriggerGCBit)) {
1557 // This is a bit for the GC side effect.
1558 if (current_bit_is_set) {
1563 // This is a bit for the array/field analysis.
1564 // The underscore character stands for the 'can trigger GC' bit.
1565 static const char *kDebug = "LZBCSIJFDLZBCSIJFD_LZBCSIJFDLZBCSIJFD";
1566 if (current_bit_is_set) {
1569 if ((s == kFieldWriteOffset) || (s == kArrayWriteOffset) ||
1570 (s == kFieldReadOffset) || (s == kArrayReadOffset)) {
1578 bool Equals(const SideEffects& other) const { return flags_ == other.flags_; }
1581 static constexpr int kFieldArrayAnalysisBits = 9;
1583 static constexpr int kFieldWriteOffset = 0;
1584 static constexpr int kArrayWriteOffset = kFieldWriteOffset + kFieldArrayAnalysisBits;
1585 static constexpr int kLastBitForWrites = kArrayWriteOffset + kFieldArrayAnalysisBits - 1;
1586 static constexpr int kCanTriggerGCBit = kLastBitForWrites + 1;
1588 static constexpr int kChangeBits = kCanTriggerGCBit + 1;
1590 static constexpr int kFieldReadOffset = kCanTriggerGCBit + 1;
1591 static constexpr int kArrayReadOffset = kFieldReadOffset + kFieldArrayAnalysisBits;
1592 static constexpr int kLastBitForReads = kArrayReadOffset + kFieldArrayAnalysisBits - 1;
1593 static constexpr int kDependsOnGCBit = kLastBitForReads + 1;
1595 static constexpr int kLastBit = kDependsOnGCBit;
1596 static constexpr int kDependOnBits = kLastBit + 1 - kChangeBits;
1600 static_assert(kChangeBits == kDependOnBits,
1601 "the 'change' bits should match the 'depend on' bits.");
1603 static constexpr uint64_t kAllChangeBits = ((1ULL << kChangeBits) - 1);
1604 static constexpr uint64_t kAllDependOnBits = ((1ULL << kDependOnBits) - 1) << kChangeBits;
1605 static constexpr uint64_t kAllWrites =
1606 ((1ULL << (kLastBitForWrites + 1 - kFieldWriteOffset)) - 1) << kFieldWriteOffset;
1607 static constexpr uint64_t kAllReads =
1608 ((1ULL << (kLastBitForReads + 1 - kFieldReadOffset)) - 1) << kFieldReadOffset;
1610 // Translates type to bit flag: selects the single bit for `type` within the
1610 // 9-bit field that starts at `offset` (one of the kField*/kArray* offsets).
1611 static uint64_t TypeFlag(Primitive::Type type, int offset) {
1612 CHECK_NE(type, Primitive::kPrimVoid);
1613 const uint64_t one = 1;
1614 const int shift = type; // 0-based consecutive enum
1615 // `shift` must fall inside one field's worth of bits [0, kFieldArrayAnalysisBits).
1615 DCHECK_LE(kFieldWriteOffset, shift);
1616 DCHECK_LT(shift, kArrayWriteOffset);
1617 return one << (type + offset);
1620 // Private constructor on direct flags value.
1621 explicit SideEffects(uint64_t flags) : flags_(flags) {}
1626 // A HEnvironment object contains the values of virtual registers at a given location.
1627 class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
1629 HEnvironment(ArenaAllocator* arena,
1630 size_t number_of_vregs,
1631 const DexFile& dex_file,
1632 uint32_t method_idx,
1634 InvokeType invoke_type,
1635 HInstruction* holder)
1636 : vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
1637 locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
1639 dex_file_(dex_file),
1640 method_idx_(method_idx),
1642 invoke_type_(invoke_type),
1646 HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
1647 : HEnvironment(arena,
1649 to_copy.GetDexFile(),
1650 to_copy.GetMethodIdx(),
1652 to_copy.GetInvokeType(),
1655 // Installs `parent` (and, recursively, its ancestors) as the parent chain of
1655 // this environment, copying each level into arena-allocated clones.
1655 void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
1656 if (parent_ != nullptr) {
1657 // A parent already exists: forward the request down the existing chain.
1657 parent_->SetAndCopyParentChain(allocator, parent);
1659 // No parent yet: clone `parent` for this holder and copy its contents.
1659 parent_ = new (allocator) HEnvironment(allocator, *parent, holder_);
1660 parent_->CopyFrom(parent);
1661 if (parent->GetParent() != nullptr) {
1662 parent_->SetAndCopyParentChain(allocator, parent->GetParent());
1667 void CopyFrom(const ArenaVector<HInstruction*>& locals);
1668 void CopyFrom(HEnvironment* environment);
1670 // Copy from `env`. If it's a loop phi for `loop_header`, copy the first
1671 // input to the loop phi instead. This is for inserting instructions that
1672 // require an environment (like HDeoptimization) in the loop pre-header.
1673 void CopyFromWithLoopPhiAdjustment(HEnvironment* env, HBasicBlock* loop_header);
1675 void SetRawEnvAt(size_t index, HInstruction* instruction) {
1676 vregs_[index] = HUserRecord<HEnvironment*>(instruction);
1679 HInstruction* GetInstructionAt(size_t index) const {
1680 return vregs_[index].GetInstruction();
1683 void RemoveAsUserOfInput(size_t index) const;
1685 size_t Size() const { return vregs_.size(); }
1687 HEnvironment* GetParent() const { return parent_; }
1689 void SetLocationAt(size_t index, Location location) {
1690 locations_[index] = location;
1693 Location GetLocationAt(size_t index) const {
1694 return locations_[index];
1697 uint32_t GetDexPc() const {
1701 uint32_t GetMethodIdx() const {
1705 InvokeType GetInvokeType() const {
1706 return invoke_type_;
1709 const DexFile& GetDexFile() const {
1713 HInstruction* GetHolder() const {
1718 bool IsFromInlinedInvoke() const {
1719 return GetParent() != nullptr;
1723 ArenaVector<HUserRecord<HEnvironment*>> vregs_;
1724 ArenaVector<Location> locations_;
1725 HEnvironment* parent_;
1726 const DexFile& dex_file_;
1727 const uint32_t method_idx_;
1728 const uint32_t dex_pc_;
1729 const InvokeType invoke_type_;
1731 // The instruction that holds this environment.
1732 HInstruction* const holder_;
1734 friend class HInstruction;
1736 DISALLOW_COPY_AND_ASSIGN(HEnvironment);
1739 class HInstruction : public ArenaObject<kArenaAllocInstruction> {
1741 HInstruction(SideEffects side_effects, uint32_t dex_pc)
1742 : previous_(nullptr),
1749 environment_(nullptr),
1750 locations_(nullptr),
1751 live_interval_(nullptr),
1752 lifetime_position_(kNoLifetime),
1753 side_effects_(side_effects),
1754 reference_type_handle_(ReferenceTypeInfo::CreateInvalid().GetTypeHandle()) {
1755 SetPackedFlag<kFlagReferenceTypeIsExact>(ReferenceTypeInfo::CreateInvalid().IsExact());
1758 virtual ~HInstruction() {}
1760 #define DECLARE_KIND(type, super) k##type,
1761 enum InstructionKind {
1762 FOR_EACH_INSTRUCTION(DECLARE_KIND)
1766 HInstruction* GetNext() const { return next_; }
1767 HInstruction* GetPrevious() const { return previous_; }
1769 HInstruction* GetNextDisregardingMoves() const;
1770 HInstruction* GetPreviousDisregardingMoves() const;
1772 HBasicBlock* GetBlock() const { return block_; }
1773 ArenaAllocator* GetArena() const { return block_->GetGraph()->GetArena(); }
1774 void SetBlock(HBasicBlock* block) { block_ = block; }
1775 bool IsInBlock() const { return block_ != nullptr; }
1776 bool IsInLoop() const { return block_->IsInLoop(); }
1777 bool IsLoopHeaderPhi() const { return IsPhi() && block_->IsLoopHeader(); }
1778 bool IsIrreducibleLoopHeaderPhi() const {
1779 return IsLoopHeaderPhi() && GetBlock()->GetLoopInformation()->IsIrreducible();
1782 virtual size_t InputCount() const = 0;
1783 HInstruction* InputAt(size_t i) const { return InputRecordAt(i).GetInstruction(); }
1785 virtual void Accept(HGraphVisitor* visitor) = 0;
1786 virtual const char* DebugName() const = 0;
1788 virtual Primitive::Type GetType() const { return Primitive::kPrimVoid; }
1789 void SetRawInputAt(size_t index, HInstruction* input) {
1790 SetRawInputRecordAt(index, HUserRecord<HInstruction*>(input));
1793 virtual bool NeedsEnvironment() const { return false; }
1795 uint32_t GetDexPc() const { return dex_pc_; }
1797 virtual bool IsControlFlow() const { return false; }
1799 virtual bool CanThrow() const { return false; }
1800 bool CanThrowIntoCatchBlock() const { return CanThrow() && block_->IsTryBlock(); }
1802 bool HasSideEffects() const { return side_effects_.HasSideEffects(); }
1803 bool DoesAnyWrite() const { return side_effects_.DoesAnyWrite(); }
1805 // Does not apply for all instructions, but having this at top level greatly
1806 // simplifies the null check elimination.
1807 // TODO: Consider merging can_be_null into ReferenceTypeInfo.
1808 virtual bool CanBeNull() const {
1809 DCHECK_EQ(GetType(), Primitive::kPrimNot) << "CanBeNull only applies to reference types";
1813 virtual bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const {
1817 virtual bool IsActualObject() const {
1818 return GetType() == Primitive::kPrimNot;
1821 void SetReferenceTypeInfo(ReferenceTypeInfo rti);
1823 ReferenceTypeInfo GetReferenceTypeInfo() const {
1824 DCHECK_EQ(GetType(), Primitive::kPrimNot);
1825 return ReferenceTypeInfo::CreateUnchecked(reference_type_handle_,
1826 GetPackedFlag<kFlagReferenceTypeIsExact>());
1829 // Records that `user` references this instruction as its input at `index`.
1829 // The new use node is pushed at the front of the use list, then the user
1829 // records affected by the insertion are fixed up.
1829 void AddUseAt(HInstruction* user, size_t index) {
1830 DCHECK(user != nullptr);
1831 // Note: fixup_end remains valid across push_front().
1832 auto fixup_end = uses_.empty() ? uses_.begin() : ++uses_.begin();
1833 HUseListNode<HInstruction*>* new_node =
1834 new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HInstruction*>(user, index);
1835 uses_.push_front(*new_node);
1836 FixUpUserRecordsAfterUseInsertion(fixup_end);
1839 // Environment counterpart of AddUseAt(): records that environment `user`
1839 // references this instruction at vreg slot `index`.
1839 void AddEnvUseAt(HEnvironment* user, size_t index) {
1840 DCHECK(user != nullptr);
1841 // Note: env_fixup_end remains valid across push_front().
1842 auto env_fixup_end = env_uses_.empty() ? env_uses_.begin() : ++env_uses_.begin();
1843 HUseListNode<HEnvironment*>* new_node =
1844 new (GetBlock()->GetGraph()->GetArena()) HUseListNode<HEnvironment*>(user, index);
1845 env_uses_.push_front(*new_node);
1846 FixUpUserRecordsAfterEnvUseInsertion(env_fixup_end);
1849 // Unregisters this instruction from the use list of its input at `input`.
1849 // Uses the stored before-use iterator for O(1) erase_after, then fixes up
1849 // the user records invalidated by the removal.
1849 void RemoveAsUserOfInput(size_t input) {
1850 HUserRecord<HInstruction*> input_use = InputRecordAt(input);
1851 HUseList<HInstruction*>::iterator before_use_node = input_use.GetBeforeUseNode();
1852 input_use.GetInstruction()->uses_.erase_after(before_use_node);
1853 input_use.GetInstruction()->FixUpUserRecordsAfterUseRemoval(before_use_node);
1856 const HUseList<HInstruction*>& GetUses() const { return uses_; }
1857 const HUseList<HEnvironment*>& GetEnvUses() const { return env_uses_; }
1859 bool HasUses() const { return !uses_.empty() || !env_uses_.empty(); }
1860 bool HasEnvironmentUses() const { return !env_uses_.empty(); }
1861 bool HasNonEnvironmentUses() const { return !uses_.empty(); }
1862 bool HasOnlyOneNonEnvironmentUse() const {
1863 return !HasEnvironmentUses() && GetUses().HasExactlyOneElement();
1866 // Does this instruction strictly dominate `other_instruction`?
1867 // Returns false if this instruction and `other_instruction` are the same.
1868 // Aborts if this instruction and `other_instruction` are both phis.
1869 bool StrictlyDominates(HInstruction* other_instruction) const;
1871 int GetId() const { return id_; }
1872 void SetId(int id) { id_ = id; }
1874 int GetSsaIndex() const { return ssa_index_; }
1875 void SetSsaIndex(int ssa_index) { ssa_index_ = ssa_index; }
1876 bool HasSsaIndex() const { return ssa_index_ != -1; }
1878 bool HasEnvironment() const { return environment_ != nullptr; }
1879 HEnvironment* GetEnvironment() const { return environment_; }
1880 // Set the `environment_` field. Raw because this method does not
1881 // update the uses lists.
1882 void SetRawEnvironment(HEnvironment* environment) {
1883 DCHECK(environment_ == nullptr);
1884 DCHECK_EQ(environment->GetHolder(), this);
1885 environment_ = environment;
1888 void RemoveEnvironment();
1890 // Set the environment of this instruction, copying it from `environment`. While
1891 // copying, the uses lists are being updated.
1892 void CopyEnvironmentFrom(HEnvironment* environment) {
1893 DCHECK(environment_ == nullptr);
1894 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
1895 // Clone `environment` with `this` as the holder, then copy its contents
1895 // and, if present, its whole parent chain.
1895 environment_ = new (allocator) HEnvironment(allocator, *environment, this);
1896 environment_->CopyFrom(environment);
1897 if (environment->GetParent() != nullptr) {
1898 environment_->SetAndCopyParentChain(allocator, environment->GetParent());
1902 void CopyEnvironmentFromWithLoopPhiAdjustment(HEnvironment* environment,
1903 HBasicBlock* block) {
1904 DCHECK(environment_ == nullptr);
1905 ArenaAllocator* allocator = GetBlock()->GetGraph()->GetArena();
1906 environment_ = new (allocator) HEnvironment(allocator, *environment, this);
1907 environment_->CopyFromWithLoopPhiAdjustment(environment, block);
1908 if (environment->GetParent() != nullptr) {
1909 environment_->SetAndCopyParentChain(allocator, environment->GetParent());
1913 // Returns the number of entries in the environment. Typically, that is the
1914 // number of dex registers in a method. It could be more in case of inlining.
1915 size_t EnvironmentSize() const;
1917 LocationSummary* GetLocations() const { return locations_; }
1918 void SetLocations(LocationSummary* locations) { locations_ = locations; }
1920 void ReplaceWith(HInstruction* instruction);
1921 void ReplaceInput(HInstruction* replacement, size_t index);
1923 // This is almost the same as doing `ReplaceWith()`. But in this helper, the
1924 // uses of this instruction by `other` are *not* updated.
1925 void ReplaceWithExceptInReplacementAtIndex(HInstruction* other, size_t use_index) {
1927 other->ReplaceInput(this, use_index);
1930 // Move `this` instruction before `cursor`.
1931 void MoveBefore(HInstruction* cursor);
1933 // Move `this` before its first user and out of any loops. If there is no
1934 // out-of-loop user that dominates all other users, move the instruction
1935 // to the end of the out-of-loop common dominator of the user's blocks.
1937 // This can be used only on non-throwing instructions with no side effects that
1938 // have at least one use but no environment uses.
1939 void MoveBeforeFirstUserAndOutOfLoops();
1941 #define INSTRUCTION_TYPE_CHECK(type, super) \
1942 bool Is##type() const; \
1943 const H##type* As##type() const; \
1944 H##type* As##type();
1946 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
1947 #undef INSTRUCTION_TYPE_CHECK
1949 #define INSTRUCTION_TYPE_CHECK(type, super) \
1950 bool Is##type() const { return (As##type() != nullptr); } \
1951 virtual const H##type* As##type() const { return nullptr; } \
1952 virtual H##type* As##type() { return nullptr; }
1953 FOR_EACH_ABSTRACT_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
1954 #undef INSTRUCTION_TYPE_CHECK
1956 // Returns whether the instruction can be moved within the graph.
1957 virtual bool CanBeMoved() const { return false; }
1959 // Returns whether the two instructions are of the same kind.
1960 virtual bool InstructionTypeEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
1964 // Returns whether any data encoded in the two instructions is equal.
1965 // This method does not look at the inputs. Both instructions must be
1966 // of the same type, otherwise the method has undefined behavior.
1967 virtual bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const {
1971 // Returns whether two instructions are equal, that is:
1972 // 1) They have the same type and contain the same data (InstructionDataEquals).
1973 // 2) Their inputs are identical.
1974 bool Equals(HInstruction* other) const;
1976 // TODO: Remove this indirection when the [[pure]] attribute proposal (n3744)
1977 // is adopted and implemented by our C++ compiler(s). Fow now, we need to hide
1978 // the virtual function because the __attribute__((__pure__)) doesn't really
1979 // apply the strong requirement for virtual functions, preventing optimizations.
1980 InstructionKind GetKind() const PURE;
1981 virtual InstructionKind GetKindInternal() const = 0;
1983 virtual size_t ComputeHashCode() const {
1984 size_t result = GetKind();
1985 for (size_t i = 0, e = InputCount(); i < e; ++i) {
1986 result = (result * 31) + InputAt(i)->GetId();
1991 SideEffects GetSideEffects() const { return side_effects_; }
1992 void SetSideEffects(SideEffects other) { side_effects_ = other; }
1993 void AddSideEffects(SideEffects other) { side_effects_.Add(other); }
1995 size_t GetLifetimePosition() const { return lifetime_position_; }
1996 void SetLifetimePosition(size_t position) { lifetime_position_ = position; }
1997 LiveInterval* GetLiveInterval() const { return live_interval_; }
1998 void SetLiveInterval(LiveInterval* interval) { live_interval_ = interval; }
1999 bool HasLiveInterval() const { return live_interval_ != nullptr; }
2001 bool IsSuspendCheckEntry() const { return IsSuspendCheck() && GetBlock()->IsEntryBlock(); }
2003 // Returns whether the code generation of the instruction will require to have access
2004 // to the current method. Such instructions are:
2005 // (1): Instructions that require an environment, as calling the runtime requires
2006 // to walk the stack and have the current method stored at a specific stack address.
2007 // (2): Object literals like classes and strings, that are loaded from the dex cache
2008 // fields of the current method.
2009 bool NeedsCurrentMethod() const {
2010 return NeedsEnvironment() || IsLoadClass() || IsLoadString();
2013 // Returns whether the code generation of the instruction will require to have access
2014 // to the dex cache of the current method's declaring class via the current method.
2015 virtual bool NeedsDexCacheOfDeclaringClass() const { return false; }
2017 // Does this instruction have any use in an environment before
2018 // control flow hits 'other'?
2019 bool HasAnyEnvironmentUseBefore(HInstruction* other);
2021 // Remove all references to environment uses of this instruction.
2022 // The caller must ensure that this is safe to do.
2023 void RemoveEnvironmentUsers();
2025 bool IsEmittedAtUseSite() const { return GetPackedFlag<kFlagEmittedAtUseSite>(); }
2026 void MarkEmittedAtUseSite() { SetPackedFlag<kFlagEmittedAtUseSite>(true); }
2029 // If set, the machine code for this instruction is assumed to be generated by
2030 // its users. Used by liveness analysis to compute use positions accordingly.
2031 static constexpr size_t kFlagEmittedAtUseSite = 0u;
2032 static constexpr size_t kFlagReferenceTypeIsExact = kFlagEmittedAtUseSite + 1;
2033 static constexpr size_t kNumberOfGenericPackedBits = kFlagReferenceTypeIsExact + 1;
2034 static constexpr size_t kMaxNumberOfPackedBits = sizeof(uint32_t) * kBitsPerByte;
  virtual const HUserRecord<HInstruction*> InputRecordAt(size_t i) const = 0;
  virtual void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) = 0;
  uint32_t GetPackedFields() const {
    return packed_fields_;
  template <size_t flag>
  bool GetPackedFlag() const {
    return (packed_fields_ & (1u << flag)) != 0u;
  template <size_t flag>
  void SetPackedFlag(bool value = true) {
    packed_fields_ = (packed_fields_ & ~(1u << flag)) | ((value ? 1u : 0u) << flag);
  template <typename BitFieldType>
  typename BitFieldType::value_type GetPackedField() const {
    return BitFieldType::Decode(packed_fields_);
  template <typename BitFieldType>
  void SetPackedField(typename BitFieldType::value_type value) {
    DCHECK(IsUint<BitFieldType::size>(static_cast<uintptr_t>(value)));
    packed_fields_ = BitFieldType::Update(value, packed_fields_);
  // After inserting use nodes in [uses_.begin(), fixup_end), rewrite the users'
  // input records so each one caches the iterator *before* its use node
  // (the use list is singly linked, so removal needs the predecessor).
  void FixUpUserRecordsAfterUseInsertion(HUseList<HInstruction*>::iterator fixup_end) {
    auto before_use_node = uses_.before_begin();
    for (auto use_node = uses_.begin(); use_node != fixup_end; ++use_node) {
      HInstruction* user = use_node->GetUser();
      size_t input_index = use_node->GetIndex();
      user->SetRawInputRecordAt(input_index, HUserRecord<HInstruction*>(this, before_use_node));
      before_use_node = use_node;
  // After removing the node after `before_use_node`, the following use's cached
  // before-iterator becomes stale; repoint it at `before_use_node`.
  void FixUpUserRecordsAfterUseRemoval(HUseList<HInstruction*>::iterator before_use_node) {
    auto next = ++HUseList<HInstruction*>::iterator(before_use_node);
    if (next != uses_.end()) {
      HInstruction* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->InputRecordAt(next_index).GetInstruction() == this);
      next_user->SetRawInputRecordAt(next_index, HUserRecord<HInstruction*>(this, before_use_node));
  // Environment-use counterpart of FixUpUserRecordsAfterUseInsertion; writes
  // directly into the HEnvironment's vregs_ (HInstruction is a friend).
  void FixUpUserRecordsAfterEnvUseInsertion(HUseList<HEnvironment*>::iterator env_fixup_end) {
    auto before_env_use_node = env_uses_.before_begin();
    for (auto env_use_node = env_uses_.begin(); env_use_node != env_fixup_end; ++env_use_node) {
      HEnvironment* user = env_use_node->GetUser();
      size_t input_index = env_use_node->GetIndex();
      user->vregs_[input_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
      before_env_use_node = env_use_node;
  // Environment-use counterpart of FixUpUserRecordsAfterUseRemoval.
  void FixUpUserRecordsAfterEnvUseRemoval(HUseList<HEnvironment*>::iterator before_env_use_node) {
    auto next = ++HUseList<HEnvironment*>::iterator(before_env_use_node);
    if (next != env_uses_.end()) {
      HEnvironment* next_user = next->GetUser();
      size_t next_index = next->GetIndex();
      DCHECK(next_user->vregs_[next_index].GetInstruction() == this);
      next_user->vregs_[next_index] = HUserRecord<HEnvironment*>(this, before_env_use_node);
  // Intrusive links for the owning HBasicBlock's instruction list.
  HInstruction* previous_;
  HInstruction* next_;
  HBasicBlock* block_;
  const uint32_t dex_pc_;
  // An instruction gets an id when it is added to the graph.
  // It reflects creation order. A negative id means the instruction
  // has not been added to the graph.
  // When doing liveness analysis, instructions that have uses get an SSA index.
  uint32_t packed_fields_;
  // List of instructions that have this instruction as input.
  HUseList<HInstruction*> uses_;
  // List of environments that contain this instruction.
  HUseList<HEnvironment*> env_uses_;
  // The environment associated with this instruction. Not null if the instruction
  // might jump out of the method.
  HEnvironment* environment_;
  // Set by the code generator.
  LocationSummary* locations_;
  // Set by the liveness analysis.
  LiveInterval* live_interval_;
  // Set by the liveness analysis, this is the position in a linear
  // order of blocks where this instruction's live interval start.
  size_t lifetime_position_;
  SideEffects side_effects_;
  // The reference handle part of the reference type info.
  // The IsExact() flag is stored in packed fields.
  // TODO: for primitive types this should be marked as invalid.
  ReferenceTypeInfo::TypeHandle reference_type_handle_;
  friend class GraphChecker;
  friend class HBasicBlock;
  friend class HEnvironment;
  friend class HGraph;
  friend class HInstructionList;
  DISALLOW_COPY_AND_ASSIGN(HInstruction);
// Pretty-printer for instruction kinds (used by graph dumps and DCHECK output).
std::ostream& operator<<(std::ostream& os, const HInstruction::InstructionKind& rhs);
// Iterates over the inputs of a single instruction (Done/Current/Advance protocol).
class HInputIterator : public ValueObject {
  explicit HInputIterator(HInstruction* instruction) : instruction_(instruction), index_(0) {}
  bool Done() const { return index_ == instruction_->InputCount(); }
  HInstruction* Current() const { return instruction_->InputAt(index_); }
  void Advance() { index_++; }
  HInstruction* instruction_;
  DISALLOW_COPY_AND_ASSIGN(HInputIterator);
// Forward iterator over an HInstructionList. Caches the next node up front so
// the current instruction may be removed from the list while iterating.
class HInstructionIterator : public ValueObject {
  explicit HInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.first_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetNext();
  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetNext();
  HInstruction* instruction_;
  HInstruction* next_;
  DISALLOW_COPY_AND_ASSIGN(HInstructionIterator);
// Backward counterpart of HInstructionIterator: walks from the last instruction
// via GetPrevious(), likewise prefetching so the current node may be removed.
class HBackwardInstructionIterator : public ValueObject {
  explicit HBackwardInstructionIterator(const HInstructionList& instructions)
      : instruction_(instructions.last_instruction_) {
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  bool Done() const { return instruction_ == nullptr; }
  HInstruction* Current() const { return instruction_; }
    instruction_ = next_;
    next_ = Done() ? nullptr : instruction_->GetPrevious();
  HInstruction* instruction_;
  HInstruction* next_;
  DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
// Base class for instructions with a compile-time-fixed number of inputs N,
// stored inline in a std::array (no arena allocation per input record).
class HTemplateInstruction: public HInstruction {
  HTemplateInstruction<N>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc), inputs_() {}
  virtual ~HTemplateInstruction() {}
  size_t InputCount() const OVERRIDE { return N; }
  const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
  void SetRawInputRecordAt(size_t i, const HUserRecord<HInstruction*>& input) OVERRIDE {
  std::array<HUserRecord<HInstruction*>, N> inputs_;
  friend class SsaBuilder;
// HTemplateInstruction specialization for N=0.
// Needed because std::array<T, 0> members are avoided; the input accessors are
// unreachable by construction (an instruction with zero inputs has no records).
class HTemplateInstruction<0>: public HInstruction {
  explicit HTemplateInstruction<0>(SideEffects side_effects, uint32_t dex_pc)
      : HInstruction(side_effects, dex_pc) {}
  virtual ~HTemplateInstruction() {}
  size_t InputCount() const OVERRIDE { return 0; }
  const HUserRecord<HInstruction*> InputRecordAt(size_t i ATTRIBUTE_UNUSED) const OVERRIDE {
    LOG(FATAL) << "Unreachable";
  void SetRawInputRecordAt(size_t i ATTRIBUTE_UNUSED,
                           const HUserRecord<HInstruction*>& input ATTRIBUTE_UNUSED) OVERRIDE {
    LOG(FATAL) << "Unreachable";
  friend class SsaBuilder;
// An HInstruction with N inputs that also produces a typed value; the result
// type is stored in the generic packed-fields word via TypeField.
template<intptr_t N>
class HExpression : public HTemplateInstruction<N> {
  HExpression<N>(Primitive::Type type, SideEffects side_effects, uint32_t dex_pc)
      : HTemplateInstruction<N>(side_effects, dex_pc) {
    this->template SetPackedField<TypeField>(type);
  virtual ~HExpression() {}
  Primitive::Type GetType() const OVERRIDE {
    return TypeField::Decode(this->GetPackedFields());
  // Packed-field layout: the type occupies the first bits after the generic flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kNumberOfExpressionPackedBits = kFieldType + kFieldTypeSize;
  static_assert(kNumberOfExpressionPackedBits <= HInstruction::kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
// Represents dex's RETURN_VOID opcode. A HReturnVoid is a control flow
// instruction that branches to the exit block.
class HReturnVoid FINAL : public HTemplateInstruction<0> {
  explicit HReturnVoid(uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {}
  // Always terminates its block (single successor: the exit block).
  bool IsControlFlow() const OVERRIDE { return true; }
  DECLARE_INSTRUCTION(ReturnVoid);
  DISALLOW_COPY_AND_ASSIGN(HReturnVoid);
// Represents dex's RETURN opcodes. A HReturn is a control flow
// instruction that branches to the exit block.
class HReturn FINAL : public HTemplateInstruction<1> {
  // `value` is the single input: the value being returned.
  explicit HReturn(HInstruction* value, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, value);
  bool IsControlFlow() const OVERRIDE { return true; }
  DECLARE_INSTRUCTION(Return);
  DISALLOW_COPY_AND_ASSIGN(HReturn);
// SSA phi node: merges one value per predecessor of its block for the dex
// register `reg_number_`. Input count is dynamic, so inputs live in an
// ArenaVector rather than the fixed HTemplateInstruction storage.
class HPhi FINAL : public HInstruction {
  HPhi(ArenaAllocator* arena,
       uint32_t reg_number,
       size_t number_of_inputs,
       Primitive::Type type,
       uint32_t dex_pc = kNoDexPc)
      : HInstruction(SideEffects::None(), dex_pc),
        inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
        reg_number_(reg_number) {
    SetPackedField<TypeField>(ToPhiType(type));
    DCHECK_NE(GetType(), Primitive::kPrimVoid);
    // Phis are constructed live and marked dead if conflicting or unused.
    // Individual steps of SsaBuilder should assume that if a phi has been
    // marked dead, it can be ignored and will be removed by SsaPhiElimination.
    SetPackedFlag<kFlagIsLive>(true);
    SetPackedFlag<kFlagCanBeNull>(true);
  // Returns a type equivalent to the given `type`, but that a `HPhi` can hold.
  static Primitive::Type ToPhiType(Primitive::Type type) {
    return Primitive::PrimitiveKind(type);
  bool IsCatchPhi() const { return GetBlock()->IsCatchBlock(); }
  size_t InputCount() const OVERRIDE { return inputs_.size(); }
  void AddInput(HInstruction* input);
  void RemoveInputAt(size_t index);
  Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
  void SetType(Primitive::Type new_type) {
    // Make sure that only valid type changes occur. The following are allowed:
    // (1) int  -> float/ref (primitive type propagation),
    // (2) long -> double (primitive type propagation).
    DCHECK(GetType() == new_type ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimFloat) ||
           (GetType() == Primitive::kPrimInt && new_type == Primitive::kPrimNot) ||
           (GetType() == Primitive::kPrimLong && new_type == Primitive::kPrimDouble));
    SetPackedField<TypeField>(new_type);
  bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
  void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
  uint32_t GetRegNumber() const { return reg_number_; }
  void SetDead() { SetPackedFlag<kFlagIsLive>(false); }
  void SetLive() { SetPackedFlag<kFlagIsLive>(true); }
  bool IsDead() const { return !IsLive(); }
  bool IsLive() const { return GetPackedFlag<kFlagIsLive>(); }
  // True if `other` is a phi for the same dex register in the same block
  // (i.e. a type-equivalent of this phi).
  bool IsVRegEquivalentOf(HInstruction* other) const {
    return other != nullptr
        && other->AsPhi()->GetBlock() == GetBlock()
        && other->AsPhi()->GetRegNumber() == GetRegNumber();
  // Returns the next equivalent phi (starting from the current one) or null if there is none.
  // An equivalent phi is a phi having the same dex register and type.
  // It assumes that phis with the same dex register are adjacent.
  HPhi* GetNextEquivalentPhiWithSameType() {
    HInstruction* next = GetNext();
    while (next != nullptr && next->AsPhi()->GetRegNumber() == reg_number_) {
      if (next->GetType() == GetType()) {
        return next->AsPhi();
      next = next->GetNext();
  DECLARE_INSTRUCTION(Phi);
  const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
    return inputs_[index];
  void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
    inputs_[index] = input;
  // Packed-field layout: type, then liveness and nullability flags.
  static constexpr size_t kFieldType = HInstruction::kNumberOfGenericPackedBits;
  static constexpr size_t kFieldTypeSize =
      MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
  static constexpr size_t kFlagIsLive = kFieldType + kFieldTypeSize;
  static constexpr size_t kFlagCanBeNull = kFlagIsLive + 1;
  static constexpr size_t kNumberOfPhiPackedBits = kFlagCanBeNull + 1;
  static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
  using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
  ArenaVector<HUserRecord<HInstruction*> > inputs_;
  const uint32_t reg_number_;
  DISALLOW_COPY_AND_ASSIGN(HPhi);
// The exit instruction is the only instruction of the exit block.
// Instructions aborting the method (HThrow and HReturn) must branch to the
// exit block.
class HExit FINAL : public HTemplateInstruction<0> {
  explicit HExit(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}
  bool IsControlFlow() const OVERRIDE { return true; }
  DECLARE_INSTRUCTION(Exit);
  DISALLOW_COPY_AND_ASSIGN(HExit);
// Jumps from one block to another.
class HGoto FINAL : public HTemplateInstruction<0> {
  explicit HGoto(uint32_t dex_pc = kNoDexPc) : HTemplateInstruction(SideEffects::None(), dex_pc) {}
  bool IsControlFlow() const OVERRIDE { return true; }
  // A goto's block has exactly one successor: the jump target.
  HBasicBlock* GetSuccessor() const {
    return GetBlock()->GetSingleSuccessor();
  DECLARE_INSTRUCTION(Goto);
  DISALLOW_COPY_AND_ASSIGN(HGoto);
// Abstract base for all compile-time constants. Constants are side-effect
// free, freely movable, and interned per graph (see the HGraph friend
// declarations on the concrete subclasses).
class HConstant : public HExpression<0> {
  explicit HConstant(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}
  bool CanBeMoved() const OVERRIDE { return true; }
  // Is this constant -1 in the arithmetic sense?
  virtual bool IsMinusOne() const { return false; }
  // Is this constant 0 in the arithmetic sense?
  virtual bool IsArithmeticZero() const { return false; }
  // Is this constant a 0-bit pattern?
  virtual bool IsZeroBitPattern() const { return false; }
  // Is this constant 1 in the arithmetic sense?
  virtual bool IsOne() const { return false; }
  // Raw bit representation of the value, zero-extended to 64 bits.
  virtual uint64_t GetValueAsUint64() const = 0;
  DECLARE_ABSTRACT_INSTRUCTION(Constant);
  DISALLOW_COPY_AND_ASSIGN(HConstant);
2480 class HNullConstant FINAL : public HConstant {
2482 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2486 uint64_t GetValueAsUint64() const OVERRIDE { return 0; }
2488 size_t ComputeHashCode() const OVERRIDE { return 0; }
2490 // The null constant representation is a 0-bit pattern.
2491 virtual bool IsZeroBitPattern() const { return true; }
2493 DECLARE_INSTRUCTION(NullConstant);
2496 explicit HNullConstant(uint32_t dex_pc = kNoDexPc) : HConstant(Primitive::kPrimNot, dex_pc) {}
2498 friend class HGraph;
2499 DISALLOW_COPY_AND_ASSIGN(HNullConstant);
// Constants of the type int. Those can be from Dex instructions, or
// synthesized (for example with the if-eqz instruction).
class HIntConstant FINAL : public HConstant {
  int32_t GetValue() const { return value_; }
  // Zero-extends through uint32_t so the sign bit is not smeared into the
  // upper 32 bits of the result.
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(static_cast<uint32_t>(value_));
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsIntConstant()) << other->DebugName();
    return other->AsIntConstant()->value_ == value_;
  size_t ComputeHashCode() const OVERRIDE { return GetValue(); }
  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }
  // Integer constants are used to encode Boolean values as well,
  // where 1 means true and 0 means false.
  bool IsTrue() const { return GetValue() == 1; }
  bool IsFalse() const { return GetValue() == 0; }
  DECLARE_INSTRUCTION(IntConstant);
  // Constructors are private: constants are interned, created via HGraph.
  explicit HIntConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value) {}
  explicit HIntConstant(bool value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimInt, dex_pc), value_(value ? 1 : 0) {}
  const int32_t value_;
  friend class HGraph;
  ART_FRIEND_TEST(GraphTest, InsertInstructionBefore);
  ART_FRIEND_TYPED_TEST(ParallelMoveTest, ConstantLast);
  DISALLOW_COPY_AND_ASSIGN(HIntConstant);
// Constants of the type long (64-bit). Interned; created via HGraph.
class HLongConstant FINAL : public HConstant {
  int64_t GetValue() const { return value_; }
  uint64_t GetValueAsUint64() const OVERRIDE { return value_; }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsLongConstant()) << other->DebugName();
    return other->AsLongConstant()->value_ == value_;
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
  bool IsMinusOne() const OVERRIDE { return GetValue() == -1; }
  bool IsArithmeticZero() const OVERRIDE { return GetValue() == 0; }
  bool IsZeroBitPattern() const OVERRIDE { return GetValue() == 0; }
  bool IsOne() const OVERRIDE { return GetValue() == 1; }
  DECLARE_INSTRUCTION(LongConstant);
  explicit HLongConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimLong, dex_pc), value_(value) {}
  const int64_t value_;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HLongConstant);
// Constants of the type float. Equality and the Is* predicates compare bit
// patterns (via bit_cast), so +0.0f and -0.0f are distinct constants and NaNs
// compare by representation; IsArithmeticZero() is the numeric-zero check.
class HFloatConstant FINAL : public HConstant {
  float GetValue() const { return value_; }
  uint64_t GetValueAsUint64() const OVERRIDE {
    return static_cast<uint64_t>(bit_cast<uint32_t, float>(value_));
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsFloatConstant()) << other->DebugName();
    return other->AsFloatConstant()->GetValueAsUint64() == GetValueAsUint64();
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>((-1.0f));
  // True for both +0.0f and -0.0f.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(0.0f);
  bool IsOne() const OVERRIDE {
    return bit_cast<uint32_t, float>(value_) == bit_cast<uint32_t, float>(1.0f);
  bool IsNaN() const {
    return std::isnan(value_);
  DECLARE_INSTRUCTION(FloatConstant);
  explicit HFloatConstant(float value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(value) {}
  // Constructs the constant from raw IEEE-754 bits.
  explicit HFloatConstant(int32_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimFloat, dex_pc), value_(bit_cast<float, int32_t>(value)) {}
  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HFloatConstant);
// Constants of the type double. Same bit-pattern comparison semantics as
// HFloatConstant, using the 64-bit representation.
class HDoubleConstant FINAL : public HConstant {
  double GetValue() const { return value_; }
  uint64_t GetValueAsUint64() const OVERRIDE { return bit_cast<uint64_t, double>(value_); }
  bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
    DCHECK(other->IsDoubleConstant()) << other->DebugName();
    return other->AsDoubleConstant()->GetValueAsUint64() == GetValueAsUint64();
  size_t ComputeHashCode() const OVERRIDE { return static_cast<size_t>(GetValue()); }
  bool IsMinusOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((-1.0));
  // True for both +0.0 and -0.0.
  bool IsArithmeticZero() const OVERRIDE {
    return std::fpclassify(value_) == FP_ZERO;
  bool IsArithmeticPositiveZero() const {
    return IsArithmeticZero() && !std::signbit(value_);
  bool IsArithmeticNegativeZero() const {
    return IsArithmeticZero() && std::signbit(value_);
  bool IsZeroBitPattern() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>((0.0));
  bool IsOne() const OVERRIDE {
    return bit_cast<uint64_t, double>(value_) == bit_cast<uint64_t, double>(1.0);
  bool IsNaN() const {
    return std::isnan(value_);
  DECLARE_INSTRUCTION(DoubleConstant);
  explicit HDoubleConstant(double value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(value) {}
  // Constructs the constant from raw IEEE-754 bits.
  explicit HDoubleConstant(int64_t value, uint32_t dex_pc = kNoDexPc)
      : HConstant(Primitive::kPrimDouble, dex_pc), value_(bit_cast<double, int64_t>(value)) {}
  const double value_;
  // Only the SsaBuilder and HGraph can create floating-point constants.
  friend class SsaBuilder;
  friend class HGraph;
  DISALLOW_COPY_AND_ASSIGN(HDoubleConstant);
// Conditional branch. A block ending with an HIf instruction must have
// two successors: true target at index 0, false target at index 1.
class HIf FINAL : public HTemplateInstruction<1> {
  // `input` is the boolean condition being tested.
  explicit HIf(HInstruction* input, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetRawInputAt(0, input);
  bool IsControlFlow() const OVERRIDE { return true; }
  HBasicBlock* IfTrueSuccessor() const {
    return GetBlock()->GetSuccessors()[0];
  HBasicBlock* IfFalseSuccessor() const {
    return GetBlock()->GetSuccessors()[1];
  DECLARE_INSTRUCTION(If);
  DISALLOW_COPY_AND_ASSIGN(HIf);
// Abstract instruction which marks the beginning and/or end of a try block and
// links it to the respective exception handlers. Behaves the same as a Goto in
// non-exceptional control flow.
// Normal-flow successor is stored at index zero, exception handlers under
// higher indices in no particular order.
class HTryBoundary FINAL : public HTemplateInstruction<0> {
  enum class BoundaryKind {
  explicit HTryBoundary(BoundaryKind kind, uint32_t dex_pc = kNoDexPc)
      : HTemplateInstruction(SideEffects::None(), dex_pc) {
    SetPackedField<BoundaryKindField>(kind);
  bool IsControlFlow() const OVERRIDE { return true; }
  // Returns the block's non-exceptional successor (index zero).
  HBasicBlock* GetNormalFlowSuccessor() const { return GetBlock()->GetSuccessors()[0]; }
  // View over successors [1..n): the exception handlers.
  ArrayRef<HBasicBlock* const> GetExceptionHandlers() const {
    return ArrayRef<HBasicBlock* const>(GetBlock()->GetSuccessors()).SubArray(1u);
  // Returns whether `handler` is among its exception handlers (non-zero index
  // in the successor list).
  bool HasExceptionHandler(const HBasicBlock& handler) const {
    DCHECK(handler.IsCatchBlock());
    return GetBlock()->HasSuccessor(&handler, 1u /* Skip first successor. */);
  // If not present already, adds `handler` to its block's list of exception
  // handlers.
  void AddExceptionHandler(HBasicBlock* handler) {
    if (!HasExceptionHandler(*handler)) {
      GetBlock()->AddSuccessor(handler);
  BoundaryKind GetBoundaryKind() const { return GetPackedField<BoundaryKindField>(); }
  bool IsEntry() const { return GetBoundaryKind() == BoundaryKind::kEntry; }
  bool HasSameExceptionHandlersAs(const HTryBoundary& other) const;
  DECLARE_INSTRUCTION(TryBoundary);
  // Packed-field layout for the boundary kind.
  static constexpr size_t kFieldBoundaryKind = kNumberOfGenericPackedBits;
  static constexpr size_t kFieldBoundaryKindSize =
      MinimumBitsToStore(static_cast<size_t>(BoundaryKind::kLast));
  static constexpr size_t kNumberOfTryBoundaryPackedBits =
      kFieldBoundaryKind + kFieldBoundaryKindSize;
  static_assert(kNumberOfTryBoundaryPackedBits <= kMaxNumberOfPackedBits,
                "Too many packed fields.");
  using BoundaryKindField = BitField<BoundaryKind, kFieldBoundaryKind, kFieldBoundaryKindSize>;
  DISALLOW_COPY_AND_ASSIGN(HTryBoundary);
// Deoptimize to interpreter, upon checking a condition.
class HDeoptimize FINAL : public HTemplateInstruction<1> {
  // We set CanTriggerGC to prevent any intermediate address to be live
  // at the point of the `HDeoptimize`.
  HDeoptimize(HInstruction* cond, uint32_t dex_pc)
      : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
    SetRawInputAt(0, cond);
  bool CanBeMoved() const OVERRIDE { return true; }
  // Carries no per-instance data; two deoptimizes are data-equal.
  bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
  // Needs an environment to rebuild interpreter state on deoptimization.
  bool NeedsEnvironment() const OVERRIDE { return true; }
  bool CanThrow() const OVERRIDE { return true; }
  DECLARE_INSTRUCTION(Deoptimize);
  DISALLOW_COPY_AND_ASSIGN(HDeoptimize);
// Represents the ArtMethod that was passed as a first argument to
// the method. It is used by instructions that depend on it, like
// instructions that work with the dex cache.
class HCurrentMethod FINAL : public HExpression<0> {
  explicit HCurrentMethod(Primitive::Type type, uint32_t dex_pc = kNoDexPc)
      : HExpression(type, SideEffects::None(), dex_pc) {}
  DECLARE_INSTRUCTION(CurrentMethod);
  DISALLOW_COPY_AND_ASSIGN(HCurrentMethod);
2804 // Fetches an ArtMethod from the virtual table or the interface method table
// Single-input expression: the input is the class (`cls`); `index_` is the
// slot in the chosen table (see TableKind). Side-effect free and movable;
// two HClassTableGet are equal when both index and packed fields match.
2806 class HClassTableGet FINAL : public HExpression<1> {
2808 enum class TableKind {
2813 HClassTableGet(HInstruction* cls,
2814 Primitive::Type type,
2818 : HExpression(type, SideEffects::None(), dex_pc),
2820 SetPackedField<TableKindField>(kind);
2821 SetRawInputAt(0, cls);
2824 bool CanBeMoved() const OVERRIDE { return true; }
2825 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
2826 return other->AsClassTableGet()->GetIndex() == index_ &&
2827 other->AsClassTableGet()->GetPackedFields() == GetPackedFields();
2830 TableKind GetTableKind() const { return GetPackedField<TableKindField>(); }
2831 size_t GetIndex() const { return index_; }
2833 DECLARE_INSTRUCTION(ClassTableGet);
2836 static constexpr size_t kFieldTableKind = kNumberOfExpressionPackedBits;
2837 static constexpr size_t kFieldTableKindSize =
2838 MinimumBitsToStore(static_cast<size_t>(TableKind::kLast));
2839 static constexpr size_t kNumberOfClassTableGetPackedBits = kFieldTableKind + kFieldTableKindSize;
2840 static_assert(kNumberOfClassTableGetPackedBits <= kMaxNumberOfPackedBits,
2841 "Too many packed fields.");
// FIX: the BitField's third template argument is the field *size*; this
// previously passed `kFieldTableKind` (the bit position) twice, which
// over-sizes the field whenever the position exceeds the required size.
// Now matches the ComparisonBiasField pattern used by HCondition/HCompare.
2842 using TableKindField = BitField<TableKind, kFieldTableKind, kFieldTableKindSize>;
2844 // The index of the ArtMethod in the table.
2845 const size_t index_;
2847 DISALLOW_COPY_AND_ASSIGN(HClassTableGet);
2850 // PackedSwitch (jump table). A block ending with a PackedSwitch instruction will
2851 // have one successor for each entry in the switch table, and the final successor
2852 // will be the block containing the next Dex opcode.
// Control-flow instruction with one value input. Cases cover the contiguous
// range [start_value_, start_value_ + num_entries_).
2853 class HPackedSwitch FINAL : public HTemplateInstruction<1> {
2855 HPackedSwitch(int32_t start_value,
2856 uint32_t num_entries,
2857 HInstruction* input,
2858 uint32_t dex_pc = kNoDexPc)
2859 : HTemplateInstruction(SideEffects::None(), dex_pc),
2860 start_value_(start_value),
2861 num_entries_(num_entries) {
2862 SetRawInputAt(0, input);
2865 bool IsControlFlow() const OVERRIDE { return true; }
2867 int32_t GetStartValue() const { return start_value_; }
2869 uint32_t GetNumEntries() const { return num_entries_; }
2871 HBasicBlock* GetDefaultBlock() const {
2872 // Last entry is the default block.
// Successors [0, num_entries_) are the case targets; successor
// [num_entries_] is the fall-through/default block.
2873 return GetBlock()->GetSuccessors()[num_entries_];
2875 DECLARE_INSTRUCTION(PackedSwitch);
2878 const int32_t start_value_;
2879 const uint32_t num_entries_;
2881 DISALLOW_COPY_AND_ASSIGN(HPackedSwitch);
// Abstract base class for one-input, side-effect-free value-producing
// operations. Subclasses provide constant folding through the pure-virtual
// Evaluate() overloads, one per constant kind.
2884 class HUnaryOperation : public HExpression<1> {
2886 HUnaryOperation(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
2887 : HExpression(result_type, SideEffects::None(), dex_pc) {
2888 SetRawInputAt(0, input);
2891 HInstruction* GetInput() const { return InputAt(0); }
2892 Primitive::Type GetResultType() const { return GetType(); }
// Unary operations are movable and compare equal purely by opcode/inputs
// (no per-instance data), hence the unused `other` argument.
2894 bool CanBeMoved() const OVERRIDE { return true; }
2895 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2899 // Try to statically evaluate `this` and return a HConstant
2900 // containing the result of this evaluation. If `this` cannot
2901 // be evaluated as a constant, return null.
2902 HConstant* TryStaticEvaluation() const;
2904 // Apply this operation to `x`.
2905 virtual HConstant* Evaluate(HIntConstant* x) const = 0;
2906 virtual HConstant* Evaluate(HLongConstant* x) const = 0;
2907 virtual HConstant* Evaluate(HFloatConstant* x) const = 0;
2908 virtual HConstant* Evaluate(HDoubleConstant* x) const = 0;
2910 DECLARE_ABSTRACT_INSTRUCTION(UnaryOperation);
2913 DISALLOW_COPY_AND_ASSIGN(HUnaryOperation);
// Abstract base class for two-input value-producing operations. Provides
// input-canonicalization helpers for GVN and constant folding via the
// Evaluate() overloads.
2916 class HBinaryOperation : public HExpression<2> {
2918 HBinaryOperation(Primitive::Type result_type,
2920 HInstruction* right,
2921 SideEffects side_effects = SideEffects::None(),
2922 uint32_t dex_pc = kNoDexPc)
2923 : HExpression(result_type, side_effects, dex_pc) {
2924 SetRawInputAt(0, left);
2925 SetRawInputAt(1, right);
2928 HInstruction* GetLeft() const { return InputAt(0); }
2929 HInstruction* GetRight() const { return InputAt(1); }
2930 Primitive::Type GetResultType() const { return GetType(); }
// Subclasses override to enable input reordering (see OrderInputs below).
2932 virtual bool IsCommutative() const { return false; }
2934 // Put constant on the right.
2935 // Returns whether order is changed.
2936 bool OrderInputsWithConstantOnTheRight() {
2937 HInstruction* left = InputAt(0);
2938 HInstruction* right = InputAt(1);
2939 if (left->IsConstant() && !right->IsConstant()) {
2940 ReplaceInput(right, 0);
2941 ReplaceInput(left, 1);
2947 // Order inputs by instruction id, but favor constant on the right side.
2948 // This helps GVN for commutative ops.
// Only valid for commutative operations (DCHECKed).
2949 void OrderInputs() {
2950 DCHECK(IsCommutative());
2951 HInstruction* left = InputAt(0);
2952 HInstruction* right = InputAt(1);
// Nothing to do when inputs are identical or already canonical.
2953 if (left == right || (!left->IsConstant() && right->IsConstant())) {
2956 if (OrderInputsWithConstantOnTheRight()) {
2959 // Order according to instruction id.
2960 if (left->GetId() > right->GetId()) {
2961 ReplaceInput(right, 0);
2962 ReplaceInput(left, 1);
2966 bool CanBeMoved() const OVERRIDE { return true; }
2967 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
2971 // Try to statically evaluate `this` and return a HConstant
2972 // containing the result of this evaluation. If `this` cannot
2973 // be evaluated as a constant, return null.
2974 HConstant* TryStaticEvaluation() const;
2976 // Apply this operation to `x` and `y`.
// The (null, null) and (long, int) combinations are errors unless a
// subclass overrides them (e.g. equality on null references, shifts).
2977 virtual HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
2978 HNullConstant* y ATTRIBUTE_UNUSED) const {
2979 LOG(FATAL) << DebugName() << " is not defined for the (null, null) case.";
2982 virtual HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const = 0;
2983 virtual HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const = 0;
2984 virtual HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED,
2985 HIntConstant* y ATTRIBUTE_UNUSED) const {
2986 LOG(FATAL) << DebugName() << " is not defined for the (long, int) case.";
2989 virtual HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const = 0;
2990 virtual HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const = 0;
2992 // Returns an input that can legally be used as the right input and is
2993 // constant, or null.
2994 HConstant* GetConstantRight() const;
2996 // If `GetConstantRight()` returns one of the input, this returns the other
2997 // one. Otherwise it returns null.
2998 HInstruction* GetLeastConstantLeft() const;
3000 DECLARE_ABSTRACT_INSTRUCTION(BinaryOperation);
3003 DISALLOW_COPY_AND_ASSIGN(HBinaryOperation);
3006 // The comparison bias applies for floating point operations and indicates how NaN
3007 // comparisons are treated:
// Stored as a packed field on both HCondition and HCompare (see
// ComparisonBiasField in those classes).
3008 enum class ComparisonBias {
3009 kNoBias, // bias is not applicable (i.e. for long operation)
3010 kGtBias, // return 1 for NaN comparisons
3011 kLtBias, // return -1 for NaN comparisons
3015 std::ostream& operator<<(std::ostream& os, const ComparisonBias& rhs);
// Abstract base class for boolean-producing comparisons (==, !=, <, <=, >,
// >=, and the unsigned Below/Above family). Carries a ComparisonBias packed
// field that becomes meaningful when a floating-point HCompare is merged
// into the condition.
3017 class HCondition : public HBinaryOperation {
3019 HCondition(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3020 : HBinaryOperation(Primitive::kPrimBoolean, first, second, SideEffects::None(), dex_pc) {
3021 SetPackedField<ComparisonBiasField>(ComparisonBias::kNoBias);
3024 // For code generation purposes, returns whether this instruction is just before
3025 // `instruction`, and disregard moves in between.
3026 bool IsBeforeWhenDisregardMoves(HInstruction* instruction) const;
3028 DECLARE_ABSTRACT_INSTRUCTION(Condition);
3030 virtual IfCondition GetCondition() const = 0;
3032 virtual IfCondition GetOppositeCondition() const = 0;
3034 bool IsGtBias() const { return GetBias() == ComparisonBias::kGtBias; }
3035 bool IsLtBias() const { return GetBias() == ComparisonBias::kLtBias; }
3037 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3038 void SetBias(ComparisonBias bias) { SetPackedField<ComparisonBiasField>(bias); }
// Conditions with the same packed fields (same bias) are interchangeable.
3040 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
3041 return GetPackedFields() == other->AsCondition()->GetPackedFields();
// Whether a NaN input makes this FP condition evaluate to true. Only
// meaningful for floating-point inputs (DCHECKed).
3044 bool IsFPConditionTrueIfNaN() const {
3045 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3046 IfCondition if_cond = GetCondition();
3047 if (if_cond == kCondNE) {
3049 } else if (if_cond == kCondEQ) {
3052 return ((if_cond == kCondGT) || (if_cond == kCondGE)) && IsGtBias();
3055 bool IsFPConditionFalseIfNaN() const {
3056 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3057 IfCondition if_cond = GetCondition();
3058 if (if_cond == kCondEQ) {
3060 } else if (if_cond == kCondNE) {
3063 return ((if_cond == kCondLT) || (if_cond == kCondLE)) && IsGtBias();
3067 // Needed if we merge a HCompare into a HCondition.
3068 static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3069 static constexpr size_t kFieldComparisonBiasSize =
3070 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3071 static constexpr size_t kNumberOfConditionPackedBits =
3072 kFieldComparisonBias + kFieldComparisonBiasSize;
3073 static_assert(kNumberOfConditionPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3074 using ComparisonBiasField =
3075 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
// Three-way compare: 1, -1 or 0 — used to fold merged HCompare inputs.
3077 template <typename T>
3078 int32_t Compare(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
// Floating-point three-way compare; unordered (NaN) operands resolve to
// +1 or -1 depending on the bias. Requires a non-kNoBias bias (DCHECKed).
3080 template <typename T>
3081 int32_t CompareFP(T x, T y) const {
3082 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3083 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3085 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compare(x, y);
3088 // Return an integer constant containing the result of a condition evaluated at compile time.
3089 HIntConstant* MakeConstantCondition(bool value, uint32_t dex_pc) const {
3090 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3094 DISALLOW_COPY_AND_ASSIGN(HCondition);
3097 // Instruction to check if two inputs are equal to each other.
// Condition `x == y`; (null, null) folds to true.
3098 class HEqual FINAL : public HCondition {
3100 HEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3101 : HCondition(first, second, dex_pc) {}
3103 bool IsCommutative() const OVERRIDE { return true; }
3105 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3106 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3107 return MakeConstantCondition(true, GetDexPc());
3109 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3110 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3112 // In the following Evaluate methods, a HCompare instruction has
3113 // been merged into this HEqual instruction; evaluate it as
3114 // `Compare(x, y) == 0`.
3115 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3116 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0),
3119 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3120 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3122 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3123 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3126 DECLARE_INSTRUCTION(Equal);
3128 IfCondition GetCondition() const OVERRIDE {
3132 IfCondition GetOppositeCondition() const OVERRIDE {
3137 template <typename T> bool Compute(T x, T y) const { return x == y; }
3139 DISALLOW_COPY_AND_ASSIGN(HEqual);
// Condition `x != y`; (null, null) folds to false.
3142 class HNotEqual FINAL : public HCondition {
3144 HNotEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3145 : HCondition(first, second, dex_pc) {}
3147 bool IsCommutative() const OVERRIDE { return true; }
3149 HConstant* Evaluate(HNullConstant* x ATTRIBUTE_UNUSED,
3150 HNullConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3151 return MakeConstantCondition(false, GetDexPc());
3153 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3154 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3156 // In the following Evaluate methods, a HCompare instruction has
3157 // been merged into this HNotEqual instruction; evaluate it as
3158 // `Compare(x, y) != 0`.
3159 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3160 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3162 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3163 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3165 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3166 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3169 DECLARE_INSTRUCTION(NotEqual);
3171 IfCondition GetCondition() const OVERRIDE {
3175 IfCondition GetOppositeCondition() const OVERRIDE {
3180 template <typename T> bool Compute(T x, T y) const { return x != y; }
3182 DISALLOW_COPY_AND_ASSIGN(HNotEqual);
// Signed condition `x < y`; merged HCompare evaluated as `Compare(x, y) < 0`.
3185 class HLessThan FINAL : public HCondition {
3187 HLessThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3188 : HCondition(first, second, dex_pc) {}
3190 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3191 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3193 // In the following Evaluate methods, a HCompare instruction has
3194 // been merged into this HLessThan instruction; evaluate it as
3195 // `Compare(x, y) < 0`.
3196 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3197 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3199 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3200 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3202 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3203 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3206 DECLARE_INSTRUCTION(LessThan);
3208 IfCondition GetCondition() const OVERRIDE {
3212 IfCondition GetOppositeCondition() const OVERRIDE {
3217 template <typename T> bool Compute(T x, T y) const { return x < y; }
3219 DISALLOW_COPY_AND_ASSIGN(HLessThan);
// Signed condition `x <= y`; merged HCompare evaluated as `Compare(x, y) <= 0`.
3222 class HLessThanOrEqual FINAL : public HCondition {
3224 HLessThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3225 : HCondition(first, second, dex_pc) {}
3227 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3228 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3230 // In the following Evaluate methods, a HCompare instruction has
3231 // been merged into this HLessThanOrEqual instruction; evaluate it as
3232 // `Compare(x, y) <= 0`.
3233 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3234 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3236 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3237 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3239 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3240 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3243 DECLARE_INSTRUCTION(LessThanOrEqual);
3245 IfCondition GetCondition() const OVERRIDE {
3249 IfCondition GetOppositeCondition() const OVERRIDE {
3254 template <typename T> bool Compute(T x, T y) const { return x <= y; }
3256 DISALLOW_COPY_AND_ASSIGN(HLessThanOrEqual);
// Signed condition `x > y`; merged HCompare evaluated as `Compare(x, y) > 0`.
3259 class HGreaterThan FINAL : public HCondition {
3261 HGreaterThan(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3262 : HCondition(first, second, dex_pc) {}
3264 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3265 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3267 // In the following Evaluate methods, a HCompare instruction has
3268 // been merged into this HGreaterThan instruction; evaluate it as
3269 // `Compare(x, y) > 0`.
3270 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3271 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3273 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3274 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3276 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3277 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3280 DECLARE_INSTRUCTION(GreaterThan);
3282 IfCondition GetCondition() const OVERRIDE {
3286 IfCondition GetOppositeCondition() const OVERRIDE {
3291 template <typename T> bool Compute(T x, T y) const { return x > y; }
3293 DISALLOW_COPY_AND_ASSIGN(HGreaterThan);
// Signed condition `x >= y`; merged HCompare evaluated as `Compare(x, y) >= 0`.
3296 class HGreaterThanOrEqual FINAL : public HCondition {
3298 HGreaterThanOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3299 : HCondition(first, second, dex_pc) {}
3301 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3302 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3304 // In the following Evaluate methods, a HCompare instruction has
3305 // been merged into this HGreaterThanOrEqual instruction; evaluate it as
3306 // `Compare(x, y) >= 0`.
3307 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3308 return MakeConstantCondition(Compute(Compare(x->GetValue(), y->GetValue()), 0), GetDexPc());
3310 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3311 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3313 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3314 return MakeConstantCondition(Compute(CompareFP(x->GetValue(), y->GetValue()), 0), GetDexPc());
3317 DECLARE_INSTRUCTION(GreaterThanOrEqual);
3319 IfCondition GetCondition() const OVERRIDE {
3323 IfCondition GetOppositeCondition() const OVERRIDE {
3328 template <typename T> bool Compute(T x, T y) const { return x >= y; }
3330 DISALLOW_COPY_AND_ASSIGN(HGreaterThanOrEqual);
// Unsigned condition `x < y` (values compared as unsigned via MakeUnsigned).
// Not defined for floating-point inputs — those Evaluate overloads abort.
3333 class HBelow FINAL : public HCondition {
3335 HBelow(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3336 : HCondition(first, second, dex_pc) {}
3338 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3339 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3341 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3342 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3344 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3345 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3346 LOG(FATAL) << DebugName() << " is not defined for float values";
3349 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3350 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3351 LOG(FATAL) << DebugName() << " is not defined for double values";
3355 DECLARE_INSTRUCTION(Below);
3357 IfCondition GetCondition() const OVERRIDE {
3361 IfCondition GetOppositeCondition() const OVERRIDE {
3366 template <typename T> bool Compute(T x, T y) const {
3367 return MakeUnsigned(x) < MakeUnsigned(y);
3370 DISALLOW_COPY_AND_ASSIGN(HBelow);
// Unsigned condition `x <= y` (via MakeUnsigned); undefined for float/double.
3373 class HBelowOrEqual FINAL : public HCondition {
3375 HBelowOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3376 : HCondition(first, second, dex_pc) {}
3378 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3379 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3381 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3382 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3384 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3385 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3386 LOG(FATAL) << DebugName() << " is not defined for float values";
3389 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3390 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3391 LOG(FATAL) << DebugName() << " is not defined for double values";
3395 DECLARE_INSTRUCTION(BelowOrEqual);
3397 IfCondition GetCondition() const OVERRIDE {
3401 IfCondition GetOppositeCondition() const OVERRIDE {
3406 template <typename T> bool Compute(T x, T y) const {
3407 return MakeUnsigned(x) <= MakeUnsigned(y);
3410 DISALLOW_COPY_AND_ASSIGN(HBelowOrEqual);
// Unsigned condition `x > y` (via MakeUnsigned); undefined for float/double.
3413 class HAbove FINAL : public HCondition {
3415 HAbove(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3416 : HCondition(first, second, dex_pc) {}
3418 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3419 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3421 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3422 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3424 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3425 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3426 LOG(FATAL) << DebugName() << " is not defined for float values";
3429 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3430 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3431 LOG(FATAL) << DebugName() << " is not defined for double values";
3435 DECLARE_INSTRUCTION(Above);
3437 IfCondition GetCondition() const OVERRIDE {
3441 IfCondition GetOppositeCondition() const OVERRIDE {
3446 template <typename T> bool Compute(T x, T y) const {
3447 return MakeUnsigned(x) > MakeUnsigned(y);
3450 DISALLOW_COPY_AND_ASSIGN(HAbove);
// Unsigned condition `x >= y` (via MakeUnsigned); undefined for float/double.
3453 class HAboveOrEqual FINAL : public HCondition {
3455 HAboveOrEqual(HInstruction* first, HInstruction* second, uint32_t dex_pc = kNoDexPc)
3456 : HCondition(first, second, dex_pc) {}
3458 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3459 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3461 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3462 return MakeConstantCondition(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3464 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
3465 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3466 LOG(FATAL) << DebugName() << " is not defined for float values";
3469 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
3470 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
3471 LOG(FATAL) << DebugName() << " is not defined for double values";
3475 DECLARE_INSTRUCTION(AboveOrEqual);
3477 IfCondition GetCondition() const OVERRIDE {
3481 IfCondition GetOppositeCondition() const OVERRIDE {
3486 template <typename T> bool Compute(T x, T y) const {
3487 return MakeUnsigned(x) >= MakeUnsigned(y);
3490 DISALLOW_COPY_AND_ASSIGN(HAboveOrEqual);
3493 // Instruction to check how two inputs compare to each other.
3494 // Result is 0 if input0 == input1, 1 if input0 > input1, or -1 if input0 < input1.
3495 class HCompare FINAL : public HBinaryOperation {
3497 // Note that `comparison_type` is the type of comparison performed
3498 // between the comparison's inputs, not the type of the instantiated
3499 // HCompare instruction (which is always Primitive::kPrimInt).
3500 HCompare(Primitive::Type comparison_type,
3501 HInstruction* first,
3502 HInstruction* second,
3503 ComparisonBias bias,
3505 : HBinaryOperation(Primitive::kPrimInt,
3508 SideEffectsForArchRuntimeCalls(comparison_type),
3510 SetPackedField<ComparisonBiasField>(bias);
// Both inputs must match the declared comparison type.
3511 DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(first->GetType()));
3512 DCHECK_EQ(comparison_type, Primitive::PrimitiveKind(second->GetType()));
// Three-way compare for integral constants: 1, -1 or 0.
3515 template <typename T>
3516 int32_t Compute(T x, T y) const { return x > y ? 1 : (x < y ? -1 : 0); }
// Floating-point three-way compare; NaN operands resolve to +1/-1
// according to the bias. Requires a non-kNoBias bias (DCHECKed).
3518 template <typename T>
3519 int32_t ComputeFP(T x, T y) const {
3520 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3521 DCHECK_NE(GetBias(), ComparisonBias::kNoBias);
3523 return std::isunordered(x, y) ? (IsGtBias() ? 1 : -1) : Compute(x, y);
3526 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
3527 // Note that there is no "cmp-int" Dex instruction so we shouldn't
3528 // reach this code path when processing a freshly built HIR
3529 // graph. However HCompare integer instructions can be synthesized
3530 // by the instruction simplifier to implement IntegerCompare and
3531 // IntegerSignum intrinsics, so we have to handle this case.
3532 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3534 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
3535 return MakeConstantComparison(Compute(x->GetValue(), y->GetValue()), GetDexPc());
3537 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
3538 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
3540 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
3541 return MakeConstantComparison(ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
// Two HCompares are interchangeable when their packed bias fields match.
3544 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
3545 return GetPackedFields() == other->AsCompare()->GetPackedFields();
3548 ComparisonBias GetBias() const { return GetPackedField<ComparisonBiasField>(); }
3550 // Does this compare instruction have a "gt bias" (vs an "lt bias")?
3551 // Only meaningful for floating-point comparisons.
3552 bool IsGtBias() const {
3553 DCHECK(Primitive::IsFloatingPointType(InputAt(0)->GetType())) << InputAt(0)->GetType();
3554 return GetBias() == ComparisonBias::kGtBias;
3557 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type type ATTRIBUTE_UNUSED) {
3558 // Comparisons do not require a runtime call in any back end.
3559 return SideEffects::None();
3562 DECLARE_INSTRUCTION(Compare);
3565 static constexpr size_t kFieldComparisonBias = kNumberOfExpressionPackedBits;
3566 static constexpr size_t kFieldComparisonBiasSize =
3567 MinimumBitsToStore(static_cast<size_t>(ComparisonBias::kLast));
3568 static constexpr size_t kNumberOfComparePackedBits =
3569 kFieldComparisonBias + kFieldComparisonBiasSize;
3570 static_assert(kNumberOfComparePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3571 using ComparisonBiasField =
3572 BitField<ComparisonBias, kFieldComparisonBias, kFieldComparisonBiasSize>;
3574 // Return an integer constant containing the result of a comparison evaluated at compile time.
3575 HIntConstant* MakeConstantComparison(int32_t value, uint32_t dex_pc) const {
3576 DCHECK(value == -1 || value == 0 || value == 1) << value;
3577 return GetBlock()->GetGraph()->GetIntConstant(value, dex_pc);
3581 DISALLOW_COPY_AND_ASSIGN(HCompare);
// Object allocation. Inputs: [0] the class to instantiate, [1] the current
// method. Produces a non-null reference (CanBeNull() is false), always needs
// an environment and can throw, and may trigger GC.
3584 class HNewInstance FINAL : public HExpression<2> {
3586 HNewInstance(HInstruction* cls,
3587 HCurrentMethod* current_method,
3589 uint16_t type_index,
3590 const DexFile& dex_file,
3591 bool needs_access_check,
3593 QuickEntrypointEnum entrypoint)
3594 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
3595 type_index_(type_index),
3596 dex_file_(dex_file),
3597 entrypoint_(entrypoint) {
3598 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
3599 SetPackedFlag<kFlagFinalizable>(finalizable);
3600 SetRawInputAt(0, cls);
3601 SetRawInputAt(1, current_method);
3604 uint16_t GetTypeIndex() const { return type_index_; }
3605 const DexFile& GetDexFile() const { return dex_file_; }
3607 // Calls runtime so needs an environment.
3608 bool NeedsEnvironment() const OVERRIDE { return true; }
3610 // Can throw errors when out-of-memory or if it's not instantiable/accessible.
3611 bool CanThrow() const OVERRIDE { return true; }
3613 // Needs to call into runtime to make sure it's instantiable/accessible.
3614 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
3616 bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
3618 bool CanBeNull() const OVERRIDE { return false; }
// The runtime allocation entrypoint; mutable so later passes can pick a
// more specific entrypoint (see SetEntrypoint).
3620 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
3622 void SetEntrypoint(QuickEntrypointEnum entrypoint) {
3623 entrypoint_ = entrypoint;
3626 bool IsStringAlloc() const;
3628 DECLARE_INSTRUCTION(NewInstance);
3631 static constexpr size_t kFlagNeedsAccessCheck = kNumberOfExpressionPackedBits;
3632 static constexpr size_t kFlagFinalizable = kFlagNeedsAccessCheck + 1;
3633 static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
3634 static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
3635 "Too many packed fields.");
3637 const uint16_t type_index_;
3638 const DexFile& dex_file_;
3639 QuickEntrypointEnum entrypoint_;
3641 DISALLOW_COPY_AND_ASSIGN(HNewInstance);
// Enumeration of recognized intrinsic methods, generated by expanding each
// entry of intrinsics_list.h through the OPTIMIZING_INTRINSICS macro.
// HInvoke uses Intrinsics::kNone to mean "not an intrinsic".
3644 enum class Intrinsics {
3645 #define OPTIMIZING_INTRINSICS(Name, IsStatic, NeedsEnvironmentOrCache, SideEffects, Exceptions) \
3647 #include "intrinsics_list.h"
3649 INTRINSICS_LIST(OPTIMIZING_INTRINSICS)
3650 #undef INTRINSICS_LIST
3651 #undef OPTIMIZING_INTRINSICS
3653 std::ostream& operator<<(std::ostream& os, const Intrinsics& intrinsic);
// Per-intrinsic properties, passed to HInvoke::SetIntrinsic() to refine the
// invoke's environment needs, side effects and throwing behavior.
3655 enum IntrinsicNeedsEnvironmentOrCache {
3656 kNoEnvironmentOrCache, // Intrinsic does not require an environment or dex cache.
3657 kNeedsEnvironmentOrCache // Intrinsic requires an environment or requires a dex cache.
3660 enum IntrinsicSideEffects {
3661 kNoSideEffects, // Intrinsic does not have any heap memory side effects.
3662 kReadSideEffects, // Intrinsic may read heap memory.
3663 kWriteSideEffects, // Intrinsic may write heap memory.
3664 kAllSideEffects // Intrinsic may read or write heap memory, or trigger GC.
3667 enum IntrinsicExceptions {
3668 kNoThrow, // Intrinsic does not throw any exceptions.
3669 kCanThrow // Intrinsic may throw exceptions.
// Abstract base class for all method invocations. Holds the argument inputs
// (plus possible extra non-argument inputs appended by subclasses), the dex
// method index, the original invoke type, the return type, and an optional
// recognized intrinsic with its optimization flags.
3672 class HInvoke : public HInstruction {
3674 size_t InputCount() const OVERRIDE { return inputs_.size(); }
3676 bool NeedsEnvironment() const OVERRIDE;
3678 void SetArgumentAt(size_t index, HInstruction* argument) {
3679 SetRawInputAt(index, argument);
3682 // Return the number of arguments. This number can be lower than
3683 // the number of inputs returned by InputCount(), as some invoke
3684 // instructions (e.g. HInvokeStaticOrDirect) can have non-argument
3685 // inputs at the end of their list of inputs.
3686 uint32_t GetNumberOfArguments() const { return number_of_arguments_; }
3688 Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }
3690 uint32_t GetDexMethodIndex() const { return dex_method_index_; }
3691 const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }
3693 InvokeType GetOriginalInvokeType() const {
3694 return GetPackedField<OriginalInvokeTypeField>();
3697 Intrinsics GetIntrinsic() const {
// Marks this invoke as a recognized intrinsic and updates its
// environment/side-effect/throw properties accordingly.
3701 void SetIntrinsic(Intrinsics intrinsic,
3702 IntrinsicNeedsEnvironmentOrCache needs_env_or_cache,
3703 IntrinsicSideEffects side_effects,
3704 IntrinsicExceptions exceptions);
3706 bool IsFromInlinedInvoke() const {
3707 return GetEnvironment()->IsFromInlinedInvoke();
// CanThrow defaults to true (set in the constructor); intrinsics may
// clear it via SetCanThrow().
3710 bool CanThrow() const OVERRIDE { return GetPackedFlag<kFlagCanThrow>(); }
// Only intrinsified invokes are movable, and two invokes are equal only
// when they are the same (non-kNone) intrinsic.
3712 bool CanBeMoved() const OVERRIDE { return IsIntrinsic(); }
3714 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
3715 return intrinsic_ != Intrinsics::kNone && intrinsic_ == other->AsInvoke()->intrinsic_;
3718 uint32_t* GetIntrinsicOptimizations() {
3719 return &intrinsic_optimizations_;
3722 const uint32_t* GetIntrinsicOptimizations() const {
3723 return &intrinsic_optimizations_;
3726 bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
3728 DECLARE_ABSTRACT_INSTRUCTION(Invoke);
3731 static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
3732 static constexpr size_t kFieldOriginalInvokeTypeSize =
3733 MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
3734 static constexpr size_t kFieldReturnType =
3735 kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
3736 static constexpr size_t kFieldReturnTypeSize =
3737 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
3738 static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
3739 static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
3740 static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
3741 using OriginalInvokeTypeField =
3742 BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
3743 using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;
// Inputs vector is sized for arguments plus subclass-specific extra inputs.
3745 HInvoke(ArenaAllocator* arena,
3746 uint32_t number_of_arguments,
3747 uint32_t number_of_other_inputs,
3748 Primitive::Type return_type,
3750 uint32_t dex_method_index,
3751 InvokeType original_invoke_type)
3753 SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays.
3754 number_of_arguments_(number_of_arguments),
3755 inputs_(number_of_arguments + number_of_other_inputs,
3756 arena->Adapter(kArenaAllocInvokeInputs)),
3757 dex_method_index_(dex_method_index),
3758 intrinsic_(Intrinsics::kNone),
3759 intrinsic_optimizations_(0) {
3760 SetPackedField<ReturnTypeField>(return_type);
3761 SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
3762 SetPackedFlag<kFlagCanThrow>(true);
3765 const HUserRecord<HInstruction*> InputRecordAt(size_t index) const OVERRIDE {
3766 return inputs_[index];
3769 void SetRawInputRecordAt(size_t index, const HUserRecord<HInstruction*>& input) OVERRIDE {
3770 inputs_[index] = input;
3773 void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
3775 uint32_t number_of_arguments_;
3776 ArenaVector<HUserRecord<HInstruction*>> inputs_;
3777 const uint32_t dex_method_index_;
3778 Intrinsics intrinsic_;
3780 // A magic word holding optimizations for intrinsics. See intrinsics.h.
3781 uint32_t intrinsic_optimizations_;
3784 DISALLOW_COPY_AND_ASSIGN(HInvoke);
// Invoke of a method that could not be resolved at compile time; forwards
// everything to HInvoke with no extra inputs.
// NOTE(review): the constructor's initializer-list lines (base-class call and
// remaining arguments) are elided in this capture — embedded numbering jumps
// 3794 -> 3796 and 3797 -> 3804.
3787 class HInvokeUnresolved FINAL : public HInvoke {
3789 HInvokeUnresolved(ArenaAllocator* arena,
3790 uint32_t number_of_arguments,
3791 Primitive::Type return_type,
3793 uint32_t dex_method_index,
3794 InvokeType invoke_type)
3796 number_of_arguments,
3797 0u /* number_of_other_inputs */,
3804 DECLARE_INSTRUCTION(InvokeUnresolved);
3807 DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
// Static or direct (non-virtual) method call. Carries the sharpened dispatch
// strategy (how to load the ArtMethod* and where the code pointer comes from)
// plus the declaring-class initialization-check requirement.
// NOTE(review): this capture is elided — several enum value lines (e.g.
// kStringInit, kRecursive, kDirectAddress, kDexCacheViaMethod, kCallSelf,
// kCallPCRelative, kCallDirect, kCallArtMethod per the surrounding comments)
// and closing braces are missing; only comments were added here.
3810 class HInvokeStaticOrDirect FINAL : public HInvoke {
3812 // Requirements of this method call regarding the class
3813 // initialization (clinit) check of its declaring class.
3814 enum class ClinitCheckRequirement {
3815 kNone, // Class already initialized.
3816 kExplicit, // Static call having explicit clinit check as last input.
3817 kImplicit, // Static call implicitly requiring a clinit check.
3821 // Determines how to load the target ArtMethod*.
3822 enum class MethodLoadKind {
3823 // Use a String init ArtMethod* loaded from Thread entrypoints.
3826 // Use the method's own ArtMethod* loaded by the register allocator.
3829 // Use ArtMethod* at a known address, embed the direct address in the code.
3830 // Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
3833 // Use ArtMethod* at an address that will be known at link time, embed the direct
3834 // address in the code. If the image is relocatable, emit .patch_oat entry.
3835 // Used for app->boot calls with relocatable image and boot->boot calls, whether
3836 // the image relocatable or not.
3837 kDirectAddressWithFixup,
3839 // Load from resolved methods array in the dex cache using a PC-relative load.
3840 // Used when we need to use the dex cache, for example for invoke-static that
3841 // may cause class initialization (the entry may point to a resolution method),
3842 // and we know that we can access the dex cache arrays using a PC-relative load.
3843 kDexCachePcRelative,
3845 // Use ArtMethod* from the resolved methods of the compiled method's own ArtMethod*.
3846 // Used for JIT when we need to use the dex cache. This is also the last-resort-kind
3847 // used when other kinds are unavailable (say, dex cache arrays are not PC-relative)
3848 // or unimplemented or impractical (i.e. slow) on a particular architecture.
3852 // Determines the location of the code pointer.
3853 enum class CodePtrLocation {
3854 // Recursive call, use local PC-relative call instruction.
3857 // Use PC-relative call instruction patched at link time.
3858 // Used for calls within an oat file, boot->boot or app->app.
3861 // Call to a known target address, embed the direct address in code.
3862 // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
3865 // Call to a target address that will be known at link time, embed the direct
3866 // address in code. If the image is relocatable, emit .patch_oat entry.
3867 // Used for app->boot calls with relocatable image and boot->boot calls, whether
3868 // the image relocatable or not.
3869 kCallDirectWithFixup,
3871 // Use code pointer from the ArtMethod*.
3872 // Used when we don't know the target code. This is also the last-resort-kind used when
3873 // other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
3877 struct DispatchInfo {
3878 MethodLoadKind method_load_kind;
3879 CodePtrLocation code_ptr_location;
3880 // The method load data holds
3881 // - thread entrypoint offset for kStringInit method if this is a string init invoke.
3882 // Note that there are multiple string init methods, each having its own offset.
3883 // - the method address for kDirectAddress
3884 // - the dex cache arrays offset for kDexCachePcRel.
3885 uint64_t method_load_data;
3886 uint64_t direct_code_ptr;
3889 HInvokeStaticOrDirect(ArenaAllocator* arena,
3890 uint32_t number_of_arguments,
3891 Primitive::Type return_type,
3893 uint32_t method_index,
3894 MethodReference target_method,
3895 DispatchInfo dispatch_info,
3896 InvokeType original_invoke_type,
3897 InvokeType optimized_invoke_type,
3898 ClinitCheckRequirement clinit_check_requirement)
3900 number_of_arguments,
3901 // There is potentially one extra argument for the HCurrentMethod node, and
3902 // potentially one other if the clinit check is explicit, and potentially
3903 // one other if the method is a string factory.
3904 (NeedsCurrentMethodInput(dispatch_info.method_load_kind) ? 1u : 0u) +
3905 (clinit_check_requirement == ClinitCheckRequirement::kExplicit ? 1u : 0u),
3909 original_invoke_type),
3910 target_method_(target_method),
3911 dispatch_info_(dispatch_info) {
3912 SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
3913 SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
// Replaces the dispatch strategy; drops the now-unneeded current-method input
// when the new method load kind no longer requires it.
3916 void SetDispatchInfo(const DispatchInfo& dispatch_info) {
3917 bool had_current_method_input = HasCurrentMethodInput();
3918 bool needs_current_method_input = NeedsCurrentMethodInput(dispatch_info.method_load_kind);
3920 // Using the current method is the default and once we find a better
3921 // method load kind, we should not go back to using the current method.
3922 DCHECK(had_current_method_input || !needs_current_method_input);
3924 if (had_current_method_input && !needs_current_method_input) {
3925 DCHECK_EQ(InputAt(GetSpecialInputIndex()), GetBlock()->GetGraph()->GetCurrentMethod());
3926 RemoveInputAt(GetSpecialInputIndex());
3928 dispatch_info_ = dispatch_info;
3931 void AddSpecialInput(HInstruction* input) {
3932 // We allow only one special input.
3933 DCHECK(!IsStringInit() && !HasCurrentMethodInput());
3934 DCHECK(InputCount() == GetSpecialInputIndex() ||
3935 (InputCount() == GetSpecialInputIndex() + 1 && IsStaticWithExplicitClinitCheck()));
3936 InsertInputAt(GetSpecialInputIndex(), input);
3939 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
3940 // We access the method via the dex cache so we can't do an implicit null check.
3941 // TODO: for intrinsics we can generate implicit null checks.
3945 bool CanBeNull() const OVERRIDE {
3946 return GetPackedField<ReturnTypeField>() == Primitive::kPrimNot && !IsStringInit();
3949 // Get the index of the special input, if any.
3951 // If the invoke HasCurrentMethodInput(), the "special input" is the current
3952 // method pointer; otherwise there may be one platform-specific special input,
3953 // such as PC-relative addressing base.
3954 uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
3955 bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
3957 InvokeType GetOptimizedInvokeType() const {
3958 return GetPackedField<OptimizedInvokeTypeField>();
3961 void SetOptimizedInvokeType(InvokeType invoke_type) {
3962 SetPackedField<OptimizedInvokeTypeField>(invoke_type);
3965 MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
3966 CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
3967 bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
3968 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE;
3969 bool IsStringInit() const { return GetMethodLoadKind() == MethodLoadKind::kStringInit; }
3970 bool HasMethodAddress() const { return GetMethodLoadKind() == MethodLoadKind::kDirectAddress; }
3971 bool HasPcRelativeDexCache() const {
3972 return GetMethodLoadKind() == MethodLoadKind::kDexCachePcRelative;
3974 bool HasCurrentMethodInput() const {
3975 // This function can be called only after the invoke has been fully initialized by the builder.
3976 if (NeedsCurrentMethodInput(GetMethodLoadKind())) {
3977 DCHECK(InputAt(GetSpecialInputIndex())->IsCurrentMethod());
3980 DCHECK(InputCount() == GetSpecialInputIndex() ||
3981 !InputAt(GetSpecialInputIndex())->IsCurrentMethod());
3985 bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
3986 MethodReference GetTargetMethod() const { return target_method_; }
3987 void SetTargetMethod(MethodReference method) { target_method_ = method; }
// NOTE(review): returns int32_t although method_load_data is uint64_t — the
// truncation appears intentional for entrypoint offsets, but verify.
3989 int32_t GetStringInitOffset() const {
3990 DCHECK(IsStringInit());
3991 return dispatch_info_.method_load_data;
3994 uint64_t GetMethodAddress() const {
3995 DCHECK(HasMethodAddress());
3996 return dispatch_info_.method_load_data;
3999 uint32_t GetDexCacheArrayOffset() const {
4000 DCHECK(HasPcRelativeDexCache());
4001 return dispatch_info_.method_load_data;
4004 uint64_t GetDirectCodePtr() const {
4005 DCHECK(HasDirectCodePtr());
4006 return dispatch_info_.direct_code_ptr;
4009 ClinitCheckRequirement GetClinitCheckRequirement() const {
4010 return GetPackedField<ClinitCheckRequirementField>();
4013 // Is this instruction a call to a static method?
4014 bool IsStatic() const {
4015 return GetOriginalInvokeType() == kStatic;
4018 // Remove the HClinitCheck or the replacement HLoadClass (set as last input by
4019 // PrepareForRegisterAllocation::VisitClinitCheck() in lieu of the initial HClinitCheck)
4020 // instruction; only relevant for static calls with explicit clinit check.
4021 void RemoveExplicitClinitCheck(ClinitCheckRequirement new_requirement) {
4022 DCHECK(IsStaticWithExplicitClinitCheck());
4023 size_t last_input_index = InputCount() - 1;
4024 HInstruction* last_input = InputAt(last_input_index);
4025 DCHECK(last_input != nullptr);
4026 DCHECK(last_input->IsLoadClass() || last_input->IsClinitCheck()) << last_input->DebugName();
4027 RemoveAsUserOfInput(last_input_index);
4029 SetPackedField<ClinitCheckRequirementField>(new_requirement);
4030 DCHECK(!IsStaticWithExplicitClinitCheck());
4033 // Is this a call to a static method whose declaring class has an
4034 // explicit initialization check in the graph?
4035 bool IsStaticWithExplicitClinitCheck() const {
4036 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kExplicit);
4039 // Is this a call to a static method whose declaring class has an
4040 // implicit initialization check requirement?
4041 bool IsStaticWithImplicitClinitCheck() const {
4042 return IsStatic() && (GetClinitCheckRequirement() == ClinitCheckRequirement::kImplicit);
4045 // Does this method load kind need the current method as an input?
4046 static bool NeedsCurrentMethodInput(MethodLoadKind kind) {
4047 return kind == MethodLoadKind::kRecursive || kind == MethodLoadKind::kDexCacheViaMethod;
4050 DECLARE_INSTRUCTION(InvokeStaticOrDirect);
4053 const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
4054 const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
4055 if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
4056 HInstruction* input = input_record.GetInstruction();
4057 // `input` is the last input of a static invoke marked as having
4058 // an explicit clinit check. It must either be:
4059 // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
4060 // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
4061 DCHECK(input != nullptr);
4062 DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
4064 return input_record;
4067 void InsertInputAt(size_t index, HInstruction* input);
4068 void RemoveInputAt(size_t index);
// Packed-field layout continues after HInvoke's packed bits.
4071 static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
4072 static constexpr size_t kFieldOptimizedInvokeTypeSize =
4073 MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
4074 static constexpr size_t kFieldClinitCheckRequirement =
4075 kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
4076 static constexpr size_t kFieldClinitCheckRequirementSize =
4077 MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
4078 static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
4079 kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
4080 static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
4081 "Too many packed fields.");
4082 using OptimizedInvokeTypeField =
4083 BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
4084 using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
4085 kFieldClinitCheckRequirement,
4086 kFieldClinitCheckRequirementSize>;
4088 // The target method may refer to different dex file or method index than the original
4089 // invoke. This happens for sharpened calls and for calls where a method was redeclared
4090 // in derived class to increase visibility.
4091 MethodReference target_method_;
4092 DispatchInfo dispatch_info_;
4094 DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
// Stream-output declarations for dumping the dispatch enums (definitions elsewhere).
4096 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs);
4097 std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckRequirement rhs);
// Virtual method call dispatched through the receiver's vtable at `vtable_index`.
// NOTE(review): a constructor parameter line (presumably `uint32_t dex_pc,`,
// used in the initializer below) and closing braces are elided in this capture.
4099 class HInvokeVirtual FINAL : public HInvoke {
4101 HInvokeVirtual(ArenaAllocator* arena,
4102 uint32_t number_of_arguments,
4103 Primitive::Type return_type,
4105 uint32_t dex_method_index,
4106 uint32_t vtable_index)
4107 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
4108 vtable_index_(vtable_index) {}
// The receiver (input 0) is implicitly null-checked by the vtable load,
// except for intrinsified calls.
4110 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4111 // TODO: Add implicit null checks in intrinsics.
4112 return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4115 uint32_t GetVTableIndex() const { return vtable_index_; }
4117 DECLARE_INSTRUCTION(InvokeVirtual);
4120 const uint32_t vtable_index_;
4122 DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
// Interface method call dispatched through the IMT at `imt_index_`.
// NOTE(review): constructor parameter lines (presumably `uint32_t dex_pc,` and
// `uint32_t imt_index)`, both used in the initializer below) are elided here.
4125 class HInvokeInterface FINAL : public HInvoke {
4127 HInvokeInterface(ArenaAllocator* arena,
4128 uint32_t number_of_arguments,
4129 Primitive::Type return_type,
4131 uint32_t dex_method_index,
4133 : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
4134 imt_index_(imt_index) {}
// The receiver (input 0) is implicitly null-checked by the dispatch,
// except for intrinsified calls.
4136 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
4137 // TODO: Add implicit null checks in intrinsics.
4138 return (obj == InputAt(0)) && !GetLocations()->Intrinsified();
4141 uint32_t GetImtIndex() const { return imt_index_; }
4142 uint32_t GetDexMethodIndex() const { return dex_method_index_; }
4144 DECLARE_INSTRUCTION(InvokeInterface);
4147 const uint32_t imt_index_;
4149 DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
4152 class HNeg FINAL : public HUnaryOperation {
4154 HNeg(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4155 : HUnaryOperation(result_type, input, dex_pc) {
4156 DCHECK_EQ(result_type, Primitive::PrimitiveKind(input->GetType()));
4159 template <typename T> T Compute(T x) const { return -x; }
4161 HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4162 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4164 HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4165 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4167 HConstant* Evaluate(HFloatConstant* x) const OVERRIDE {
4168 return GetBlock()->GetGraph()->GetFloatConstant(Compute(x->GetValue()), GetDexPc());
4170 HConstant* Evaluate(HDoubleConstant* x) const OVERRIDE {
4171 return GetBlock()->GetGraph()->GetDoubleConstant(Compute(x->GetValue()), GetDexPc());
4174 DECLARE_INSTRUCTION(Neg);
4177 DISALLOW_COPY_AND_ASSIGN(HNeg);
4180 class HNewArray FINAL : public HExpression<2> {
4182 HNewArray(HInstruction* length,
4183 HCurrentMethod* current_method,
4185 uint16_t type_index,
4186 const DexFile& dex_file,
4187 QuickEntrypointEnum entrypoint)
4188 : HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
4189 type_index_(type_index),
4190 dex_file_(dex_file),
4191 entrypoint_(entrypoint) {
4192 SetRawInputAt(0, length);
4193 SetRawInputAt(1, current_method);
4196 uint16_t GetTypeIndex() const { return type_index_; }
4197 const DexFile& GetDexFile() const { return dex_file_; }
4199 // Calls runtime so needs an environment.
4200 bool NeedsEnvironment() const OVERRIDE { return true; }
4202 // May throw NegativeArraySizeException, OutOfMemoryError, etc.
4203 bool CanThrow() const OVERRIDE { return true; }
4205 bool CanBeNull() const OVERRIDE { return false; }
4207 QuickEntrypointEnum GetEntrypoint() const { return entrypoint_; }
4209 DECLARE_INSTRUCTION(NewArray);
4212 const uint16_t type_index_;
4213 const DexFile& dex_file_;
4214 const QuickEntrypointEnum entrypoint_;
4216 DISALLOW_COPY_AND_ASSIGN(HNewArray);
4219 class HAdd FINAL : public HBinaryOperation {
4221 HAdd(Primitive::Type result_type,
4223 HInstruction* right,
4224 uint32_t dex_pc = kNoDexPc)
4225 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4227 bool IsCommutative() const OVERRIDE { return true; }
4229 template <typename T> T Compute(T x, T y) const { return x + y; }
4231 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4232 return GetBlock()->GetGraph()->GetIntConstant(
4233 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4235 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4236 return GetBlock()->GetGraph()->GetLongConstant(
4237 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4239 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4240 return GetBlock()->GetGraph()->GetFloatConstant(
4241 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4243 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4244 return GetBlock()->GetGraph()->GetDoubleConstant(
4245 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4248 DECLARE_INSTRUCTION(Add);
4251 DISALLOW_COPY_AND_ASSIGN(HAdd);
4254 class HSub FINAL : public HBinaryOperation {
4256 HSub(Primitive::Type result_type,
4258 HInstruction* right,
4259 uint32_t dex_pc = kNoDexPc)
4260 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4262 template <typename T> T Compute(T x, T y) const { return x - y; }
4264 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4265 return GetBlock()->GetGraph()->GetIntConstant(
4266 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4268 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4269 return GetBlock()->GetGraph()->GetLongConstant(
4270 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4272 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4273 return GetBlock()->GetGraph()->GetFloatConstant(
4274 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4276 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4277 return GetBlock()->GetGraph()->GetDoubleConstant(
4278 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4281 DECLARE_INSTRUCTION(Sub);
4284 DISALLOW_COPY_AND_ASSIGN(HSub);
4287 class HMul FINAL : public HBinaryOperation {
4289 HMul(Primitive::Type result_type,
4291 HInstruction* right,
4292 uint32_t dex_pc = kNoDexPc)
4293 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4295 bool IsCommutative() const OVERRIDE { return true; }
4297 template <typename T> T Compute(T x, T y) const { return x * y; }
4299 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4300 return GetBlock()->GetGraph()->GetIntConstant(
4301 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4303 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4304 return GetBlock()->GetGraph()->GetLongConstant(
4305 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4307 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4308 return GetBlock()->GetGraph()->GetFloatConstant(
4309 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4311 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4312 return GetBlock()->GetGraph()->GetDoubleConstant(
4313 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4316 DECLARE_INSTRUCTION(Mul);
4319 DISALLOW_COPY_AND_ASSIGN(HMul);
4322 class HDiv FINAL : public HBinaryOperation {
4324 HDiv(Primitive::Type result_type,
4326 HInstruction* right,
4328 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4330 template <typename T>
4331 T ComputeIntegral(T x, T y) const {
4332 DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4333 // Our graph structure ensures we never have 0 for `y` during
4334 // constant folding.
4336 // Special case -1 to avoid getting a SIGFPE on x86(_64).
4337 return (y == -1) ? -x : x / y;
4340 template <typename T>
4341 T ComputeFP(T x, T y) const {
4342 DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4346 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4347 return GetBlock()->GetGraph()->GetIntConstant(
4348 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4350 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4351 return GetBlock()->GetGraph()->GetLongConstant(
4352 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4354 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4355 return GetBlock()->GetGraph()->GetFloatConstant(
4356 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4358 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4359 return GetBlock()->GetGraph()->GetDoubleConstant(
4360 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4363 static SideEffects SideEffectsForArchRuntimeCalls() {
4364 // The generated code can use a runtime call.
4365 return SideEffects::CanTriggerGC();
4368 DECLARE_INSTRUCTION(Div);
4371 DISALLOW_COPY_AND_ASSIGN(HDiv);
4374 class HRem FINAL : public HBinaryOperation {
4376 HRem(Primitive::Type result_type,
4378 HInstruction* right,
4380 : HBinaryOperation(result_type, left, right, SideEffectsForArchRuntimeCalls(), dex_pc) {}
4382 template <typename T>
4383 T ComputeIntegral(T x, T y) const {
4384 DCHECK(!Primitive::IsFloatingPointType(GetType())) << GetType();
4385 // Our graph structure ensures we never have 0 for `y` during
4386 // constant folding.
4388 // Special case -1 to avoid getting a SIGFPE on x86(_64).
4389 return (y == -1) ? 0 : x % y;
4392 template <typename T>
4393 T ComputeFP(T x, T y) const {
4394 DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
4395 return std::fmod(x, y);
4398 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4399 return GetBlock()->GetGraph()->GetIntConstant(
4400 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4402 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4403 return GetBlock()->GetGraph()->GetLongConstant(
4404 ComputeIntegral(x->GetValue(), y->GetValue()), GetDexPc());
4406 HConstant* Evaluate(HFloatConstant* x, HFloatConstant* y) const OVERRIDE {
4407 return GetBlock()->GetGraph()->GetFloatConstant(
4408 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4410 HConstant* Evaluate(HDoubleConstant* x, HDoubleConstant* y) const OVERRIDE {
4411 return GetBlock()->GetGraph()->GetDoubleConstant(
4412 ComputeFP(x->GetValue(), y->GetValue()), GetDexPc());
4415 static SideEffects SideEffectsForArchRuntimeCalls() {
4416 return SideEffects::CanTriggerGC();
4419 DECLARE_INSTRUCTION(Rem);
4422 DISALLOW_COPY_AND_ASSIGN(HRem);
4425 class HDivZeroCheck FINAL : public HExpression<1> {
4427 // `HDivZeroCheck` can trigger GC, as it may call the `ArithmeticException`
4429 HDivZeroCheck(HInstruction* value, uint32_t dex_pc)
4430 : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4431 SetRawInputAt(0, value);
4434 Primitive::Type GetType() const OVERRIDE { return InputAt(0)->GetType(); }
4436 bool CanBeMoved() const OVERRIDE { return true; }
4438 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4442 bool NeedsEnvironment() const OVERRIDE { return true; }
4443 bool CanThrow() const OVERRIDE { return true; }
4445 DECLARE_INSTRUCTION(DivZeroCheck);
4448 DISALLOW_COPY_AND_ASSIGN(HDivZeroCheck);
4451 class HShl FINAL : public HBinaryOperation {
4453 HShl(Primitive::Type result_type,
4454 HInstruction* value,
4455 HInstruction* distance,
4456 uint32_t dex_pc = kNoDexPc)
4457 : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4458 DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4459 DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4462 template <typename T>
4463 T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4464 return value << (distance & max_shift_distance);
4467 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4468 return GetBlock()->GetGraph()->GetIntConstant(
4469 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4471 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4472 return GetBlock()->GetGraph()->GetLongConstant(
4473 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4475 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4476 HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4477 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4480 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4481 HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4482 LOG(FATAL) << DebugName() << " is not defined for float values";
4485 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4486 HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4487 LOG(FATAL) << DebugName() << " is not defined for double values";
4491 DECLARE_INSTRUCTION(Shl);
4494 DISALLOW_COPY_AND_ASSIGN(HShl);
4497 class HShr FINAL : public HBinaryOperation {
4499 HShr(Primitive::Type result_type,
4500 HInstruction* value,
4501 HInstruction* distance,
4502 uint32_t dex_pc = kNoDexPc)
4503 : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4504 DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4505 DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4508 template <typename T>
4509 T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4510 return value >> (distance & max_shift_distance);
4513 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4514 return GetBlock()->GetGraph()->GetIntConstant(
4515 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4517 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4518 return GetBlock()->GetGraph()->GetLongConstant(
4519 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4521 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4522 HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4523 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4526 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4527 HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4528 LOG(FATAL) << DebugName() << " is not defined for float values";
4531 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4532 HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4533 LOG(FATAL) << DebugName() << " is not defined for double values";
4537 DECLARE_INSTRUCTION(Shr);
4540 DISALLOW_COPY_AND_ASSIGN(HShr);
4543 class HUShr FINAL : public HBinaryOperation {
4545 HUShr(Primitive::Type result_type,
4546 HInstruction* value,
4547 HInstruction* distance,
4548 uint32_t dex_pc = kNoDexPc)
4549 : HBinaryOperation(result_type, value, distance, SideEffects::None(), dex_pc) {
4550 DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4551 DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
4554 template <typename T>
4555 T Compute(T value, int32_t distance, int32_t max_shift_distance) const {
4556 typedef typename std::make_unsigned<T>::type V;
4557 V ux = static_cast<V>(value);
4558 return static_cast<T>(ux >> (distance & max_shift_distance));
4561 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4562 return GetBlock()->GetGraph()->GetIntConstant(
4563 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4565 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4566 return GetBlock()->GetGraph()->GetLongConstant(
4567 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
4569 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4570 HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4571 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4574 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4575 HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4576 LOG(FATAL) << DebugName() << " is not defined for float values";
4579 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4580 HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4581 LOG(FATAL) << DebugName() << " is not defined for double values";
4585 DECLARE_INSTRUCTION(UShr);
4588 DISALLOW_COPY_AND_ASSIGN(HUShr);
4591 class HAnd FINAL : public HBinaryOperation {
4593 HAnd(Primitive::Type result_type,
4595 HInstruction* right,
4596 uint32_t dex_pc = kNoDexPc)
4597 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4599 bool IsCommutative() const OVERRIDE { return true; }
4601 template <typename T> T Compute(T x, T y) const { return x & y; }
4603 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4604 return GetBlock()->GetGraph()->GetIntConstant(
4605 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4607 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4608 return GetBlock()->GetGraph()->GetLongConstant(
4609 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4611 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4612 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4613 LOG(FATAL) << DebugName() << " is not defined for float values";
4616 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4617 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4618 LOG(FATAL) << DebugName() << " is not defined for double values";
4622 DECLARE_INSTRUCTION(And);
4625 DISALLOW_COPY_AND_ASSIGN(HAnd);
// HIR node for bitwise OR (x | y). Commutative; constant-foldable for int
// and long operands; aborts for floating-point inputs (mirrors HAnd/HXor).
4628 class HOr FINAL : public HBinaryOperation {
4630 HOr(Primitive::Type result_type,
4632 HInstruction* right,
4633 uint32_t dex_pc = kNoDexPc)
4634 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4636 bool IsCommutative() const OVERRIDE { return true; }
// Core folding rule shared by the int and long Evaluate overloads below.
4638 template <typename T> T Compute(T x, T y) const { return x | y; }
4640 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4641 return GetBlock()->GetGraph()->GetIntConstant(
4642 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4644 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4645 return GetBlock()->GetGraph()->GetLongConstant(
4646 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4648 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4649 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4650 LOG(FATAL) << DebugName() << " is not defined for float values";
4653 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4654 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4655 LOG(FATAL) << DebugName() << " is not defined for double values";
4659 DECLARE_INSTRUCTION(Or);
4662 DISALLOW_COPY_AND_ASSIGN(HOr);
// HIR node for bitwise XOR (x ^ y). Commutative; constant-foldable for int
// and long operands; aborts for floating-point inputs (mirrors HAnd/HOr).
4665 class HXor FINAL : public HBinaryOperation {
4667 HXor(Primitive::Type result_type,
4669 HInstruction* right,
4670 uint32_t dex_pc = kNoDexPc)
4671 : HBinaryOperation(result_type, left, right, SideEffects::None(), dex_pc) {}
4673 bool IsCommutative() const OVERRIDE { return true; }
// Core folding rule shared by the int and long Evaluate overloads below.
4675 template <typename T> T Compute(T x, T y) const { return x ^ y; }
4677 HConstant* Evaluate(HIntConstant* x, HIntConstant* y) const OVERRIDE {
4678 return GetBlock()->GetGraph()->GetIntConstant(
4679 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4681 HConstant* Evaluate(HLongConstant* x, HLongConstant* y) const OVERRIDE {
4682 return GetBlock()->GetGraph()->GetLongConstant(
4683 Compute(x->GetValue(), y->GetValue()), GetDexPc());
4685 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED,
4686 HFloatConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4687 LOG(FATAL) << DebugName() << " is not defined for float values";
4690 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED,
4691 HDoubleConstant* y ATTRIBUTE_UNUSED) const OVERRIDE {
4692 LOG(FATAL) << DebugName() << " is not defined for double values";
4696 DECLARE_INSTRUCTION(Xor);
4699 DISALLOW_COPY_AND_ASSIGN(HXor);
// HIR node for rotate-right: rotates `value` right by `distance` bits.
// The distance operand is always int-kind; only the low bits of the distance
// (masked by kMaxIntShiftDistance / kMaxLongShiftDistance) are meaningful.
4702 class HRor FINAL : public HBinaryOperation {
4704 HRor(Primitive::Type result_type, HInstruction* value, HInstruction* distance)
4705 : HBinaryOperation(result_type, value, distance) {
4706 DCHECK_EQ(result_type, Primitive::PrimitiveKind(value->GetType()));
4707 DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(distance->GetType()));
// Rotate-right in the unsigned domain: cast to unsigned so the right shift
// does not sign-extend, then OR in the bits wrapped around from the left.
4710 template <typename T>
4711 T Compute(T value, int32_t distance, int32_t max_shift_value) const {
4712 typedef typename std::make_unsigned<T>::type V;
4713 V ux = static_cast<V>(value);
// A masked distance of 0 means no rotation; also avoids the undefined
// shift-by-reg_bits in the wrap-around expression below.
4714 if ((distance & max_shift_value) == 0) {
4715 return static_cast<T>(ux);
4717 const V reg_bits = sizeof(T) * 8;
// NOTE(review): the wrap-around term left-shifts the *signed* `value`
// rather than `ux`; left-shifting a negative signed value is technically
// UB in C++ — consider shifting `ux` instead. TODO confirm intent.
4718 return static_cast<T>(ux >> (distance & max_shift_value)) |
4719 (value << (reg_bits - (distance & max_shift_value)));
4723 HConstant* Evaluate(HIntConstant* value, HIntConstant* distance) const OVERRIDE {
4724 return GetBlock()->GetGraph()->GetIntConstant(
4725 Compute(value->GetValue(), distance->GetValue(), kMaxIntShiftDistance), GetDexPc());
4727 HConstant* Evaluate(HLongConstant* value, HIntConstant* distance) const OVERRIDE {
4728 return GetBlock()->GetGraph()->GetLongConstant(
4729 Compute(value->GetValue(), distance->GetValue(), kMaxLongShiftDistance), GetDexPc());
// The distance is always int-kind (see constructor DCHECK), so a long
// distance, or any floating-point operand, is a compiler bug.
4731 HConstant* Evaluate(HLongConstant* value ATTRIBUTE_UNUSED,
4732 HLongConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4733 LOG(FATAL) << DebugName() << " is not defined for the (long, long) case.";
4736 HConstant* Evaluate(HFloatConstant* value ATTRIBUTE_UNUSED,
4737 HFloatConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4738 LOG(FATAL) << DebugName() << " is not defined for float values";
4741 HConstant* Evaluate(HDoubleConstant* value ATTRIBUTE_UNUSED,
4742 HDoubleConstant* distance ATTRIBUTE_UNUSED) const OVERRIDE {
4743 LOG(FATAL) << DebugName() << " is not defined for double values";
4747 DECLARE_INSTRUCTION(Ror);
4750 DISALLOW_COPY_AND_ASSIGN(HRor);
4753 // The value of a parameter in this method. Its location depends on
4754 // the calling convention.
4755 class HParameterValue FINAL : public HExpression<0> {
4757 HParameterValue(const DexFile& dex_file,
4758 uint16_t type_index,
4760 Primitive::Type parameter_type,
4761 bool is_this = false)
4762 : HExpression(parameter_type, SideEffects::None(), kNoDexPc),
4763 dex_file_(dex_file),
4764 type_index_(type_index),
// 'this' is never null; any other reference parameter may be until
// proven otherwise (see SetCanBeNull below).
4766 SetPackedFlag<kFlagIsThis>(is_this);
4767 SetPackedFlag<kFlagCanBeNull>(!is_this);
4770 const DexFile& GetDexFile() const { return dex_file_; }
4771 uint16_t GetTypeIndex() const { return type_index_; }
4772 uint8_t GetIndex() const { return index_; }
4773 bool IsThis() const { return GetPackedFlag<kFlagIsThis>(); }
4775 bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
4776 void SetCanBeNull(bool can_be_null) { SetPackedFlag<kFlagCanBeNull>(can_be_null); }
4778 DECLARE_INSTRUCTION(ParameterValue);
4781 // Whether or not the parameter value corresponds to 'this' argument.
4782 static constexpr size_t kFlagIsThis = kNumberOfExpressionPackedBits;
4783 static constexpr size_t kFlagCanBeNull = kFlagIsThis + 1;
4784 static constexpr size_t kNumberOfParameterValuePackedBits = kFlagCanBeNull + 1;
4785 static_assert(kNumberOfParameterValuePackedBits <= kMaxNumberOfPackedBits,
4786 "Too many packed fields.");
4788 const DexFile& dex_file_;
4789 const uint16_t type_index_;
4790 // The index of this parameter in the parameters list. Must be less
4791 // than HGraph::number_of_in_vregs_.
4792 const uint8_t index_;
4794 DISALLOW_COPY_AND_ASSIGN(HParameterValue);
// HIR node for bitwise NOT (~x). Constant-foldable for int and long inputs;
// aborts for floating-point inputs, where NOT is not defined.
4797 class HNot FINAL : public HUnaryOperation {
4799 HNot(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc = kNoDexPc)
4800 : HUnaryOperation(result_type, input, dex_pc) {}
// Pure computation on its input, so it is safe to hoist/CSE.
4802 bool CanBeMoved() const OVERRIDE { return true; }
4803 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
4807 template <typename T> T Compute(T x) const { return ~x; }
4809 HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4810 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4812 HConstant* Evaluate(HLongConstant* x) const OVERRIDE {
4813 return GetBlock()->GetGraph()->GetLongConstant(Compute(x->GetValue()), GetDexPc());
4815 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4816 LOG(FATAL) << DebugName() << " is not defined for float values";
4819 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4820 LOG(FATAL) << DebugName() << " is not defined for double values";
4824 DECLARE_INSTRUCTION(Not);
4827 DISALLOW_COPY_AND_ASSIGN(HNot);
// HIR node for logical negation of a boolean (0/1) value. Only the int
// Evaluate overload is meaningful; long/float/double inputs are a bug.
4830 class HBooleanNot FINAL : public HUnaryOperation {
4832 explicit HBooleanNot(HInstruction* input, uint32_t dex_pc = kNoDexPc)
4833 : HUnaryOperation(Primitive::Type::kPrimBoolean, input, dex_pc) {}
4835 bool CanBeMoved() const OVERRIDE { return true; }
4836 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
// The DCHECK enforces the input really is 0 or 1 before negating it.
4840 template <typename T> bool Compute(T x) const {
4841 DCHECK(IsUint<1>(x)) << x;
4845 HConstant* Evaluate(HIntConstant* x) const OVERRIDE {
4846 return GetBlock()->GetGraph()->GetIntConstant(Compute(x->GetValue()), GetDexPc());
4848 HConstant* Evaluate(HLongConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4849 LOG(FATAL) << DebugName() << " is not defined for long values";
4852 HConstant* Evaluate(HFloatConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4853 LOG(FATAL) << DebugName() << " is not defined for float values";
4856 HConstant* Evaluate(HDoubleConstant* x ATTRIBUTE_UNUSED) const OVERRIDE {
4857 LOG(FATAL) << DebugName() << " is not defined for double values";
4861 DECLARE_INSTRUCTION(BooleanNot);
4864 DISALLOW_COPY_AND_ASSIGN(HBooleanNot);
// HIR node converting a value from one primitive type to another
// (e.g. int-to-long, float-to-int). Never produces a boolean result.
4867 class HTypeConversion FINAL : public HExpression<1> {
4869 // Instantiate a type conversion of `input` to `result_type`.
4870 HTypeConversion(Primitive::Type result_type, HInstruction* input, uint32_t dex_pc)
4871 : HExpression(result_type,
4872 SideEffectsForArchRuntimeCalls(input->GetType(), result_type),
4874 SetRawInputAt(0, input);
4875 // Invariant: We should never generate a conversion to a Boolean value.
4876 DCHECK_NE(Primitive::kPrimBoolean, result_type);
4879 HInstruction* GetInput() const { return InputAt(0); }
4880 Primitive::Type GetInputType() const { return GetInput()->GetType(); }
4881 Primitive::Type GetResultType() const { return GetType(); }
4883 bool CanBeMoved() const OVERRIDE { return true; }
4884 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
4886 // Try to statically evaluate the conversion and return a HConstant
4887 // containing the result. If the input cannot be converted, return nullptr.
4888 HConstant* TryStaticEvaluation() const;
// Conversions between floating-point and long/integral types may be
// lowered to runtime calls on some targets, hence the conservative
// CanTriggerGC side effect (explained inline below).
4890 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type input_type,
4891 Primitive::Type result_type) {
4892 // Some architectures may not require the 'GC' side effects, but at this point
4893 // in the compilation process we do not know what architecture we will
4894 // generate code for, so we must be conservative.
4895 if ((Primitive::IsFloatingPointType(input_type) && Primitive::IsIntegralType(result_type))
4896 || (input_type == Primitive::kPrimLong && Primitive::IsFloatingPointType(result_type))) {
4897 return SideEffects::CanTriggerGC();
4899 return SideEffects::None();
4902 DECLARE_INSTRUCTION(TypeConversion);
4905 DISALLOW_COPY_AND_ASSIGN(HTypeConversion);
// Sentinel "no register" marker: -1 converted to uint32_t, i.e. 0xFFFFFFFF.
4908 static constexpr uint32_t kNoRegNumber = -1;
// HIR node that checks its input for null and throws NullPointerException
// if it is; otherwise yields the (now provably non-null) input value.
4910 class HNullCheck FINAL : public HExpression<1> {
4912 // `HNullCheck` can trigger GC, as it may call the `NullPointerException`
4914 HNullCheck(HInstruction* value, uint32_t dex_pc)
4915 : HExpression(value->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
4916 SetRawInputAt(0, value);
4919 bool CanBeMoved() const OVERRIDE { return true; }
4920 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
// Needs an environment because throwing requires deopt/stack-map info.
4924 bool NeedsEnvironment() const OVERRIDE { return true; }
4926 bool CanThrow() const OVERRIDE { return true; }
// If the check passes, the produced value is by definition non-null.
4928 bool CanBeNull() const OVERRIDE { return false; }
4931 DECLARE_INSTRUCTION(NullCheck);
4934 DISALLOW_COPY_AND_ASSIGN(HNullCheck);
// Immutable descriptor of a field access: offset, type, volatility, and
// the dex-file identity of the field. Shared by the field get/set nodes.
4937 class FieldInfo : public ValueObject {
4939 FieldInfo(MemberOffset field_offset,
4940 Primitive::Type field_type,
4943 uint16_t declaring_class_def_index,
4944 const DexFile& dex_file,
4945 Handle<mirror::DexCache> dex_cache)
4946 : field_offset_(field_offset),
4947 field_type_(field_type),
4948 is_volatile_(is_volatile),
4950 declaring_class_def_index_(declaring_class_def_index),
4951 dex_file_(dex_file),
4952 dex_cache_(dex_cache) {}
4954 MemberOffset GetFieldOffset() const { return field_offset_; }
4955 Primitive::Type GetFieldType() const { return field_type_; }
4956 uint32_t GetFieldIndex() const { return index_; }
4957 uint16_t GetDeclaringClassDefIndex() const { return declaring_class_def_index_;}
4958 const DexFile& GetDexFile() const { return dex_file_; }
4959 bool IsVolatile() const { return is_volatile_; }
4960 Handle<mirror::DexCache> GetDexCache() const { return dex_cache_; }
// All members are const: a FieldInfo never changes after construction.
4963 const MemberOffset field_offset_;
4964 const Primitive::Type field_type_;
4965 const bool is_volatile_;
4966 const uint32_t index_;
4967 const uint16_t declaring_class_def_index_;
4968 const DexFile& dex_file_;
4969 const Handle<mirror::DexCache> dex_cache_;
// HIR node reading an instance field of the object in input 0.
4972 class HInstanceFieldGet FINAL : public HExpression<1> {
4974 HInstanceFieldGet(HInstruction* value,
4975 Primitive::Type field_type,
4976 MemberOffset field_offset,
4979 uint16_t declaring_class_def_index,
4980 const DexFile& dex_file,
4981 Handle<mirror::DexCache> dex_cache,
4983 : HExpression(field_type,
4984 SideEffects::FieldReadOfType(field_type, is_volatile),
4986 field_info_(field_offset,
4990 declaring_class_def_index,
4993 SetRawInputAt(0, value);
// Volatile reads have ordering semantics and must not be moved.
4996 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
4998 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
4999 HInstanceFieldGet* other_get = other->AsInstanceFieldGet();
5000 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
// An access within the first page of a null object faults, so the fault
// handler can stand in for an explicit null check.
5003 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5004 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
5007 size_t ComputeHashCode() const OVERRIDE {
5008 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
5011 const FieldInfo& GetFieldInfo() const { return field_info_; }
5012 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5013 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5014 bool IsVolatile() const { return field_info_.IsVolatile(); }
5016 DECLARE_INSTRUCTION(InstanceFieldGet);
5019 const FieldInfo field_info_;
5021 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldGet);
// HIR node writing `value` (input 1) into an instance field of the object
// in input 0.
5024 class HInstanceFieldSet FINAL : public HTemplateInstruction<2> {
5026 HInstanceFieldSet(HInstruction* object,
5027 HInstruction* value,
5028 Primitive::Type field_type,
5029 MemberOffset field_offset,
5032 uint16_t declaring_class_def_index,
5033 const DexFile& dex_file,
5034 Handle<mirror::DexCache> dex_cache,
5036 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
5038 field_info_(field_offset,
5042 declaring_class_def_index,
// Conservatively assume the stored value may be null until an analysis
// clears the flag (see ClearValueCanBeNull).
5045 SetPackedFlag<kFlagValueCanBeNull>(true);
5046 SetRawInputAt(0, object);
5047 SetRawInputAt(1, value);
// Same first-page heuristic as HInstanceFieldGet: the hardware fault
// handler can implement the null check.
5050 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5051 return (obj == InputAt(0)) && GetFieldOffset().Uint32Value() < kPageSize;
5054 const FieldInfo& GetFieldInfo() const { return field_info_; }
5055 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5056 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5057 bool IsVolatile() const { return field_info_.IsVolatile(); }
5058 HInstruction* GetValue() const { return InputAt(1); }
5059 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5060 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5062 DECLARE_INSTRUCTION(InstanceFieldSet);
5065 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
5066 static constexpr size_t kNumberOfInstanceFieldSetPackedBits = kFlagValueCanBeNull + 1;
5067 static_assert(kNumberOfInstanceFieldSetPackedBits <= kMaxNumberOfPackedBits,
5068 "Too many packed fields.");
5070 const FieldInfo field_info_;
5072 DISALLOW_COPY_AND_ASSIGN(HInstanceFieldSet);
// HIR node reading array[index]: input 0 is the array, input 1 the index.
5075 class HArrayGet FINAL : public HExpression<2> {
5077 HArrayGet(HInstruction* array, HInstruction* index, Primitive::Type type, uint32_t dex_pc)
5078 : HExpression(type, SideEffects::ArrayReadOfType(type), dex_pc) {
5079 SetRawInputAt(0, array);
5080 SetRawInputAt(1, index);
5083 bool CanBeMoved() const OVERRIDE { return true; }
5084 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5087 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5088 // TODO: We can be smarter here.
5089 // Currently, the array access is always preceded by an ArrayLength or a NullCheck
5090 // which generates the implicit null check. There are cases when these can be removed
5091 // to produce better code. If we ever add optimizations to do so we should allow an
5092 // implicit check here (as long as the address falls in the first page).
// Two gets at the same dex_pc are "equivalent": same array and index,
// differing only in int/long vs. float/double typing of the element
// (the debug-build checks below verify exactly that).
5096 bool IsEquivalentOf(HArrayGet* other) const {
5097 bool result = (GetDexPc() == other->GetDexPc());
5098 if (kIsDebugBuild && result) {
5099 DCHECK_EQ(GetBlock(), other->GetBlock());
5100 DCHECK_EQ(GetArray(), other->GetArray());
5101 DCHECK_EQ(GetIndex(), other->GetIndex());
5102 if (Primitive::IsIntOrLongType(GetType())) {
5103 DCHECK(Primitive::IsFloatingPointType(other->GetType())) << other->GetType();
5105 DCHECK(Primitive::IsFloatingPointType(GetType())) << GetType();
5106 DCHECK(Primitive::IsIntOrLongType(other->GetType())) << other->GetType();
5112 HInstruction* GetArray() const { return InputAt(0); }
5113 HInstruction* GetIndex() const { return InputAt(1); }
5115 DECLARE_INSTRUCTION(ArrayGet);
5118 DISALLOW_COPY_AND_ASSIGN(HArrayGet);
// HIR node writing array[index] = value (inputs 0, 1, 2 respectively).
// Reference stores may need a runtime type check, which can throw
// ArrayStoreException.
5121 class HArraySet FINAL : public HTemplateInstruction<3> {
5123 HArraySet(HInstruction* array,
5124 HInstruction* index,
5125 HInstruction* value,
5126 Primitive::Type expected_component_type,
5128 : HTemplateInstruction(SideEffects::None(), dex_pc) {
5129 SetPackedField<ExpectedComponentTypeField>(expected_component_type);
// Only reference stores need the covariance type check.
5130 SetPackedFlag<kFlagNeedsTypeCheck>(value->GetType() == Primitive::kPrimNot);
5131 SetPackedFlag<kFlagValueCanBeNull>(true);
5132 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(false);
5133 SetRawInputAt(0, array);
5134 SetRawInputAt(1, index);
5135 SetRawInputAt(2, value);
5136 // Make a best guess now, may be refined during SSA building.
5137 ComputeSideEffects();
5140 bool NeedsEnvironment() const OVERRIDE {
5141 // We call a runtime method to throw ArrayStoreException.
5142 return NeedsTypeCheck();
5145 // Can throw ArrayStoreException.
5146 bool CanThrow() const OVERRIDE { return NeedsTypeCheck(); }
5148 bool CanDoImplicitNullCheckOn(HInstruction* obj ATTRIBUTE_UNUSED) const OVERRIDE {
5149 // TODO: Same as for ArrayGet.
5153 void ClearNeedsTypeCheck() {
5154 SetPackedFlag<kFlagNeedsTypeCheck>(false);
5157 void ClearValueCanBeNull() {
5158 SetPackedFlag<kFlagValueCanBeNull>(false);
5161 void SetStaticTypeOfArrayIsObjectArray() {
5162 SetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>(true);
5165 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5166 bool NeedsTypeCheck() const { return GetPackedFlag<kFlagNeedsTypeCheck>(); }
5167 bool StaticTypeOfArrayIsObjectArray() const {
5168 return GetPackedFlag<kFlagStaticTypeOfArrayIsObjectArray>();
5171 HInstruction* GetArray() const { return InputAt(0); }
5172 HInstruction* GetIndex() const { return InputAt(1); }
5173 HInstruction* GetValue() const { return InputAt(2); }
5175 Primitive::Type GetComponentType() const {
5176 // The Dex format does not type floating point index operations. Since the
5177 // `expected_component_type_` is set during building and can therefore not
5178 // be correct, we also check what is the value type. If it is a floating
5179 // point type, we must use that type.
5180 Primitive::Type value_type = GetValue()->GetType();
5181 return ((value_type == Primitive::kPrimFloat) || (value_type == Primitive::kPrimDouble))
5183 : GetRawExpectedComponentType();
5186 Primitive::Type GetRawExpectedComponentType() const {
5187 return GetPackedField<ExpectedComponentTypeField>();
// Recompute side effects from the (possibly refined) component type.
5190 void ComputeSideEffects() {
5191 Primitive::Type type = GetComponentType();
5192 SetSideEffects(SideEffects::ArrayWriteOfType(type).Union(
5193 SideEffectsForArchRuntimeCalls(type)));
5196 static SideEffects SideEffectsForArchRuntimeCalls(Primitive::Type value_type) {
5197 return (value_type == Primitive::kPrimNot) ? SideEffects::CanTriggerGC() : SideEffects::None();
5200 DECLARE_INSTRUCTION(ArraySet);
5203 static constexpr size_t kFieldExpectedComponentType = kNumberOfGenericPackedBits;
5204 static constexpr size_t kFieldExpectedComponentTypeSize =
5205 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5206 static constexpr size_t kFlagNeedsTypeCheck =
5207 kFieldExpectedComponentType + kFieldExpectedComponentTypeSize;
5208 static constexpr size_t kFlagValueCanBeNull = kFlagNeedsTypeCheck + 1;
5209 // Cached information for the reference_type_info_ so that codegen
5210 // does not need to inspect the static type.
5211 static constexpr size_t kFlagStaticTypeOfArrayIsObjectArray = kFlagValueCanBeNull + 1;
5212 static constexpr size_t kNumberOfArraySetPackedBits =
5213 kFlagStaticTypeOfArrayIsObjectArray + 1;
5214 static_assert(kNumberOfArraySetPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5215 using ExpectedComponentTypeField =
5216 BitField<Primitive::Type, kFieldExpectedComponentType, kFieldExpectedComponentTypeSize>;
5218 DISALLOW_COPY_AND_ASSIGN(HArraySet);
// HIR node yielding the length of the array in input 0. Also used for
// String.length()/isEmpty() via the kFlagIsStringLength flag below.
5221 class HArrayLength FINAL : public HExpression<1> {
5223 HArrayLength(HInstruction* array, uint32_t dex_pc)
5224 : HExpression(Primitive::kPrimInt, SideEffects::None(), dex_pc) {
5225 // Note that arrays do not change length, so the instruction does not
5226 // depend on any write.
5227 SetRawInputAt(0, array);
5230 bool CanBeMoved() const OVERRIDE { return true; }
5231 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
// The length field is at a small offset, so loading it from a null array
// faults inside the first page and can serve as the null check.
5234 bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
5235 return obj == InputAt(0);
5238 void MarkAsStringLength() { SetPackedFlag<kFlagIsStringLength>(); }
5239 bool IsStringLength() const { return GetPackedFlag<kFlagIsStringLength>(); }
5241 DECLARE_INSTRUCTION(ArrayLength);
5244 // We treat a String as an array, creating the HArrayLength from String.length()
5245 // or String.isEmpty() intrinsic in the instruction simplifier. We can always
5246 // determine whether a particular HArrayLength is actually a String.length() by
5247 // looking at the type of the input but that requires holding the mutator lock, so
5248 // we prefer to use a flag, so that code generators don't need to do the locking.
5249 static constexpr size_t kFlagIsStringLength = kNumberOfExpressionPackedBits;
5250 static constexpr size_t kNumberOfArrayLengthPackedBits = kFlagIsStringLength + 1;
5251 static_assert(kNumberOfArrayLengthPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5252 "Too many packed fields.");
5254 DISALLOW_COPY_AND_ASSIGN(HArrayLength);
// HIR node checking index (input 0) against length (input 1); throws
// IndexOutOfBoundsException on failure, otherwise yields the index.
5257 class HBoundsCheck FINAL : public HExpression<2> {
5259 // `HBoundsCheck` can trigger GC, as it may call the `IndexOutOfBoundsException`
5261 HBoundsCheck(HInstruction* index, HInstruction* length, uint32_t dex_pc)
5262 : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
5263 DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
5264 SetRawInputAt(0, index);
5265 SetRawInputAt(1, length);
5268 bool CanBeMoved() const OVERRIDE { return true; }
5269 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
// Throwing requires deopt/stack-map info, hence the environment.
5273 bool NeedsEnvironment() const OVERRIDE { return true; }
5275 bool CanThrow() const OVERRIDE { return true; }
5277 HInstruction* GetIndex() const { return InputAt(0); }
5279 DECLARE_INSTRUCTION(BoundsCheck);
5282 DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
// HIR node marking a safepoint where the runtime may suspend the thread
// (e.g. for GC), hence the CanTriggerGC side effect.
5285 class HSuspendCheck FINAL : public HTemplateInstruction<0> {
5287 explicit HSuspendCheck(uint32_t dex_pc = kNoDexPc)
5288 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc), slow_path_(nullptr) {}
5290 bool NeedsEnvironment() const OVERRIDE {
5294 void SetSlowPath(SlowPathCode* slow_path) { slow_path_ = slow_path; }
5295 SlowPathCode* GetSlowPath() const { return slow_path_; }
5297 DECLARE_INSTRUCTION(SuspendCheck);
5300 // Only used for code generation, in order to share the same slow path between back edges
5302 SlowPathCode* slow_path_;
5304 DISALLOW_COPY_AND_ASSIGN(HSuspendCheck);
5307 // Pseudo-instruction which provides the native debugger with mapping information.
5308 // It ensures that we can generate line number and local variables at this point.
5309 class HNativeDebugInfo : public HTemplateInstruction<0> {
5311 explicit HNativeDebugInfo(uint32_t dex_pc)
5312 : HTemplateInstruction<0>(SideEffects::None(), dex_pc) {}
// Keeps an environment so the debugger can reconstruct locals here.
5314 bool NeedsEnvironment() const OVERRIDE {
5318 DECLARE_INSTRUCTION(NativeDebugInfo);
5321 DISALLOW_COPY_AND_ASSIGN(HNativeDebugInfo);
5325 * Instruction to load a Class object.
5327 class HLoadClass FINAL : public HExpression<1> {
5329 HLoadClass(HCurrentMethod* current_method,
5330 uint16_t type_index,
5331 const DexFile& dex_file,
5332 bool is_referrers_class,
5334 bool needs_access_check,
5335 bool is_in_dex_cache)
5336 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5337 type_index_(type_index),
5338 dex_file_(dex_file),
5339 loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
5340 // Referrers class should not need access check. We never inline unverified
5341 // methods so we can't possibly end up in this situation.
5342 DCHECK(!is_referrers_class || !needs_access_check);
5344 SetPackedFlag<kFlagIsReferrersClass>(is_referrers_class);
5345 SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
5346 SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache);
5347 SetPackedFlag<kFlagGenerateClInitCheck>(false);
5348 SetRawInputAt(0, current_method);
5351 bool CanBeMoved() const OVERRIDE { return true; }
5353 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
5354 // Note that we don't need to test for generate_clinit_check_.
5355 // Whether or not we need to generate the clinit check is processed in
5356 // prepare_for_register_allocator based on existing HInvokes and HClinitChecks.
5357 return other->AsLoadClass()->type_index_ == type_index_ &&
5358 other->AsLoadClass()->GetPackedFields() == GetPackedFields();
5361 size_t ComputeHashCode() const OVERRIDE { return type_index_; }
5363 uint16_t GetTypeIndex() const { return type_index_; }
// A loaded Class object is never null.
5364 bool CanBeNull() const OVERRIDE { return false; }
5366 bool NeedsEnvironment() const OVERRIDE {
5367 return CanCallRuntime();
5370 void SetMustGenerateClinitCheck(bool generate_clinit_check) {
5371 // The entrypoint the code generator is going to call does not do
5372 // clinit of the class.
5373 DCHECK(!NeedsAccessCheck());
5374 SetPackedFlag<kFlagGenerateClInitCheck>(generate_clinit_check);
// Runtime involvement is needed for clinit checks or when the class is
// neither the referrer's class nor already in the dex cache.
5377 bool CanCallRuntime() const {
5378 return MustGenerateClinitCheck() ||
5379 (!IsReferrersClass() && !IsInDexCache()) ||
5384 bool CanThrow() const OVERRIDE {
5385 return CanCallRuntime();
5388 ReferenceTypeInfo GetLoadedClassRTI() {
5389 return loaded_class_rti_;
5392 void SetLoadedClassRTI(ReferenceTypeInfo rti) {
5393 // Make sure we only set exact types (the loaded class should never be merged).
5394 DCHECK(rti.IsExact());
5395 loaded_class_rti_ = rti;
5398 const DexFile& GetDexFile() { return dex_file_; }
5400 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE { return !IsReferrersClass(); }
5402 static SideEffects SideEffectsForArchRuntimeCalls() {
5403 return SideEffects::CanTriggerGC();
5406 bool IsReferrersClass() const { return GetPackedFlag<kFlagIsReferrersClass>(); }
5407 bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
5408 bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
5409 bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
5411 DECLARE_INSTRUCTION(LoadClass);
5414 static constexpr size_t kFlagIsReferrersClass = kNumberOfExpressionPackedBits;
5415 static constexpr size_t kFlagNeedsAccessCheck = kFlagIsReferrersClass + 1;
5416 static constexpr size_t kFlagIsInDexCache = kFlagNeedsAccessCheck + 1;
5417 // Whether this instruction must generate the initialization check.
5418 // Used for code generation.
5419 static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1;
5420 static constexpr size_t kNumberOfLoadClassPackedBits = kFlagGenerateClInitCheck + 1;
5421 static_assert(kNumberOfLoadClassPackedBits < kMaxNumberOfPackedBits, "Too many packed fields.");
5423 const uint16_t type_index_;
5424 const DexFile& dex_file_;
5426 ReferenceTypeInfo loaded_class_rti_;
5428 DISALLOW_COPY_AND_ASSIGN(HLoadClass);
// Instruction to load a String object. The concrete code-generation
// strategy is selected via LoadKind; kind-specific payload lives in the
// load_data_ union at the bottom of the class.
5431 class HLoadString FINAL : public HExpression<1> {
5433 // Determines how to load the String.
5434 enum class LoadKind {
5435 // Use boot image String* address that will be known at link time.
5436 // Used for boot image strings referenced by boot image code in non-PIC mode.
5437 kBootImageLinkTimeAddress,
5439 // Use PC-relative boot image String* address that will be known at link time.
5440 // Used for boot image strings referenced by boot image code in PIC mode.
5441 kBootImageLinkTimePcRelative,
5443 // Use a known boot image String* address, embedded in the code by the codegen.
5444 // Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
5445 // Note: codegen needs to emit a linker patch if indicated by compiler options'
5446 // GetIncludePatchInformation().
5449 // Load from the resolved strings array at an absolute address.
5450 // Used for strings outside the boot image referenced by JIT-compiled code.
5453 // Load from resolved strings array in the dex cache using a PC-relative load.
5454 // Used for strings outside boot image when we know that we can access
5455 // the dex cache arrays using a PC-relative load.
5456 kDexCachePcRelative,
5458 // Load from resolved strings array accessed through the class loaded from
5459 // the compiled method's own ArtMethod*. This is the default access type when
5460 // all other types are unavailable.
5463 kLast = kDexCacheViaMethod
5466 HLoadString(HCurrentMethod* current_method,
5467 uint32_t string_index,
5468 const DexFile& dex_file,
5470 : HExpression(Primitive::kPrimNot, SideEffectsForArchRuntimeCalls(), dex_pc),
5471 string_index_(string_index) {
5472 SetPackedFlag<kFlagIsInDexCache>(false);
// Default to the most general access type; later passes may switch to a
// cheaper kind via the SetLoadKindWith* methods below.
5473 SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod);
5474 load_data_.ref.dex_file = &dex_file;
5475 SetRawInputAt(0, current_method);
5478 void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
5479 DCHECK(HasAddress(load_kind));
5480 load_data_.address = address;
5481 SetLoadKindInternal(load_kind);
5484 void SetLoadKindWithStringReference(LoadKind load_kind,
5485 const DexFile& dex_file,
5486 uint32_t string_index) {
5487 DCHECK(HasStringReference(load_kind));
5488 load_data_.ref.dex_file = &dex_file;
5489 string_index_ = string_index;
5490 SetLoadKindInternal(load_kind);
5493 void SetLoadKindWithDexCacheReference(LoadKind load_kind,
5494 const DexFile& dex_file,
5495 uint32_t element_index) {
5496 DCHECK(HasDexCacheReference(load_kind));
5497 load_data_.ref.dex_file = &dex_file;
5498 load_data_.ref.dex_cache_element_index = element_index;
5499 SetLoadKindInternal(load_kind);
5502 LoadKind GetLoadKind() const {
5503 return GetPackedField<LoadKindField>();
5506 const DexFile& GetDexFile() const;
5508 uint32_t GetStringIndex() const {
5509 DCHECK(HasStringReference(GetLoadKind()) || /* For slow paths. */ !IsInDexCache());
5510 return string_index_;
5513 uint32_t GetDexCacheElementOffset() const;
5515 uint64_t GetAddress() const {
5516 DCHECK(HasAddress(GetLoadKind()));
5517 return load_data_.address;
5520 bool CanBeMoved() const OVERRIDE { return true; }
5522 bool InstructionDataEquals(HInstruction* other) const OVERRIDE;
5524 size_t ComputeHashCode() const OVERRIDE { return string_index_; }
5526 // Will call the runtime if we need to load the string through
5527 // the dex cache and the string is not guaranteed to be there yet.
5528 bool NeedsEnvironment() const OVERRIDE {
5529 LoadKind load_kind = GetLoadKind();
5530 if (load_kind == LoadKind::kBootImageLinkTimeAddress ||
5531 load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5532 load_kind == LoadKind::kBootImageAddress) {
5535 return !IsInDexCache();
5538 bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
5539 return GetLoadKind() == LoadKind::kDexCacheViaMethod;
// A resolved String is never null.
5542 bool CanBeNull() const OVERRIDE { return false; }
5543 bool CanThrow() const OVERRIDE { return NeedsEnvironment(); }
5545 static SideEffects SideEffectsForArchRuntimeCalls() {
5546 return SideEffects::CanTriggerGC();
5549 bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
// Once the string is known to be in the dex cache the load cannot call
// into the runtime, so the environment and side effects are dropped.
5551 void MarkInDexCache() {
5552 SetPackedFlag<kFlagIsInDexCache>(true);
5553 DCHECK(!NeedsEnvironment());
5554 RemoveEnvironment();
5555 SetSideEffects(SideEffects::None());
// The current-method input may be cleared, in which case the node has no
// inputs; see also AddSpecialInput.
5558 size_t InputCount() const OVERRIDE {
5559 return (InputAt(0) != nullptr) ? 1u : 0u;
5562 void AddSpecialInput(HInstruction* special_input);
5564 DECLARE_INSTRUCTION(LoadString);
5567 static constexpr size_t kFlagIsInDexCache = kNumberOfExpressionPackedBits;
5568 static constexpr size_t kFieldLoadKind = kFlagIsInDexCache + 1;
5569 static constexpr size_t kFieldLoadKindSize =
5570 MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast));
5571 static constexpr size_t kNumberOfLoadStringPackedBits = kFieldLoadKind + kFieldLoadKindSize;
5572 static_assert(kNumberOfLoadStringPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5573 using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
// Predicates describing which payload (string ref / address / dex-cache
// ref) each LoadKind carries in load_data_.
5575 static bool HasStringReference(LoadKind load_kind) {
5576 return load_kind == LoadKind::kBootImageLinkTimeAddress ||
5577 load_kind == LoadKind::kBootImageLinkTimePcRelative ||
5578 load_kind == LoadKind::kDexCacheViaMethod;
5581 static bool HasAddress(LoadKind load_kind) {
5582 return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress;
5585 static bool HasDexCacheReference(LoadKind load_kind) {
5586 return load_kind == LoadKind::kDexCachePcRelative;
5589 void SetLoadKindInternal(LoadKind load_kind);
5591 // String index serves also as the hash code and it's also needed for slow-paths,
5592 // so it must not be overwritten with other load data.
5593 uint32_t string_index_;
5597 const DexFile* dex_file; // For string reference and dex cache reference.
5598 uint32_t dex_cache_element_index; // Only for dex cache reference.
5600 uint64_t address; // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
5603 DISALLOW_COPY_AND_ASSIGN(HLoadString);
5605 std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs);
5607 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
// Returns the dex file of the string/dex-cache reference; only meaningful
// for load kinds that actually carry a dex file reference (DCHECKed below).
5608 inline const DexFile& HLoadString::GetDexFile() const {
5609 DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()))
5611 return *load_data_.ref.dex_file;
5614 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
// Returns the dex-cache element index; valid only for kDexCachePcRelative.
5615 inline uint32_t HLoadString::GetDexCacheElementOffset() const {
5616 DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
5617 return load_data_.ref.dex_cache_element_index;
5620 // Note: defined outside class to see operator<<(., HLoadString::LoadKind).
// Attaches the optional input 0 (e.g. a PC-relative base). Input 0 must
// still be empty; the use is registered on the special input as well.
5621 inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
5622 // The special input is used for PC-relative loads on some architectures.
5623 DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
5624 GetLoadKind() == LoadKind::kDexCachePcRelative) << GetLoadKind();
5625 DCHECK(InputAt(0) == nullptr);
5626 SetRawInputAt(0u, special_input);
5627 special_input->AddUseAt(this, 0);
5631 * Performs an initialization check on its Class object input.
// Produces the (checked) class as its value; may invoke the runtime to run
// the class initializer, hence the all-changes side effects and CanThrow().
5633 class HClinitCheck FINAL : public HExpression<1> {
5635 HClinitCheck(HLoadClass* constant, uint32_t dex_pc)
5637 Primitive::kPrimNot,
5638 SideEffects::AllChanges(), // Assume write/read on all fields/arrays.
5640 SetRawInputAt(0, constant);
5643 bool CanBeMoved() const OVERRIDE { return true; }
5644 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5648 bool NeedsEnvironment() const OVERRIDE {
5649 // May call runtime to initialize the class.
5653 bool CanThrow() const OVERRIDE { return true; }
5655 HLoadClass* GetLoadClass() const { return InputAt(0)->AsLoadClass(); }
5657 DECLARE_INSTRUCTION(ClinitCheck);
5660 DISALLOW_COPY_AND_ASSIGN(HClinitCheck);
// Reads a resolved static field. Input 0 is the class holding the field.
// Volatile gets are not movable; equality/hashing are keyed on field offset.
5663 class HStaticFieldGet FINAL : public HExpression<1> {
5665 HStaticFieldGet(HInstruction* cls,
5666 Primitive::Type field_type,
5667 MemberOffset field_offset,
5670 uint16_t declaring_class_def_index,
5671 const DexFile& dex_file,
5672 Handle<mirror::DexCache> dex_cache,
5674 : HExpression(field_type,
5675 SideEffects::FieldReadOfType(field_type, is_volatile),
5677 field_info_(field_offset,
5681 declaring_class_def_index,
5684 SetRawInputAt(0, cls);
5688 bool CanBeMoved() const OVERRIDE { return !IsVolatile(); }
5690 bool InstructionDataEquals(HInstruction* other) const OVERRIDE {
5691 HStaticFieldGet* other_get = other->AsStaticFieldGet();
5692 return GetFieldOffset().SizeValue() == other_get->GetFieldOffset().SizeValue();
5695 size_t ComputeHashCode() const OVERRIDE {
5696 return (HInstruction::ComputeHashCode() << 7) | GetFieldOffset().SizeValue();
// Field metadata accessors; all delegate to the immutable field_info_.
5699 const FieldInfo& GetFieldInfo() const { return field_info_; }
5700 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5701 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5702 bool IsVolatile() const { return field_info_.IsVolatile(); }
5704 DECLARE_INSTRUCTION(StaticFieldGet);
5707 const FieldInfo field_info_;
5709 DISALLOW_COPY_AND_ASSIGN(HStaticFieldGet);
// Writes a resolved static field. Input 0 is the class, input 1 the value.
// Tracks whether the stored value may be null (for write-barrier elision).
5712 class HStaticFieldSet FINAL : public HTemplateInstruction<2> {
5714 HStaticFieldSet(HInstruction* cls,
5715 HInstruction* value,
5716 Primitive::Type field_type,
5717 MemberOffset field_offset,
5720 uint16_t declaring_class_def_index,
5721 const DexFile& dex_file,
5722 Handle<mirror::DexCache> dex_cache,
5724 : HTemplateInstruction(SideEffects::FieldWriteOfType(field_type, is_volatile),
5726 field_info_(field_offset,
5730 declaring_class_def_index,
5733 SetPackedFlag<kFlagValueCanBeNull>(true);
5734 SetRawInputAt(0, cls);
5735 SetRawInputAt(1, value);
5738 const FieldInfo& GetFieldInfo() const { return field_info_; }
5739 MemberOffset GetFieldOffset() const { return field_info_.GetFieldOffset(); }
5740 Primitive::Type GetFieldType() const { return field_info_.GetFieldType(); }
5741 bool IsVolatile() const { return field_info_.IsVolatile(); }
5743 HInstruction* GetValue() const { return InputAt(1); }
5744 bool GetValueCanBeNull() const { return GetPackedFlag<kFlagValueCanBeNull>(); }
5745 void ClearValueCanBeNull() { SetPackedFlag<kFlagValueCanBeNull>(false); }
5747 DECLARE_INSTRUCTION(StaticFieldSet);
// Single packed flag appended after the generic instruction bits.
5750 static constexpr size_t kFlagValueCanBeNull = kNumberOfGenericPackedBits;
5751 static constexpr size_t kNumberOfStaticFieldSetPackedBits = kFlagValueCanBeNull + 1;
5752 static_assert(kNumberOfStaticFieldSetPackedBits <= kMaxNumberOfPackedBits,
5753 "Too many packed fields.");
5755 const FieldInfo field_info_;
5757 DISALLOW_COPY_AND_ASSIGN(HStaticFieldSet);
// Reads an instance field that could not be resolved at compile time;
// always goes through the runtime, hence environment + CanThrow().
5760 class HUnresolvedInstanceFieldGet FINAL : public HExpression<1> {
5762 HUnresolvedInstanceFieldGet(HInstruction* obj,
5763 Primitive::Type field_type,
5764 uint32_t field_index,
5766 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
5767 field_index_(field_index) {
5768 SetRawInputAt(0, obj);
5771 bool NeedsEnvironment() const OVERRIDE { return true; }
5772 bool CanThrow() const OVERRIDE { return true; }
5774 Primitive::Type GetFieldType() const { return GetType(); }
5775 uint32_t GetFieldIndex() const { return field_index_; }
5777 DECLARE_INSTRUCTION(UnresolvedInstanceFieldGet);
5780 const uint32_t field_index_;
5782 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldGet);
// Writes an instance field that could not be resolved at compile time;
// input 0 is the object, input 1 the value. Field type is packed.
5785 class HUnresolvedInstanceFieldSet FINAL : public HTemplateInstruction<2> {
5787 HUnresolvedInstanceFieldSet(HInstruction* obj,
5788 HInstruction* value,
5789 Primitive::Type field_type,
5790 uint32_t field_index,
5792 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
5793 field_index_(field_index) {
5794 SetPackedField<FieldTypeField>(field_type);
5795 DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
5796 SetRawInputAt(0, obj);
5797 SetRawInputAt(1, value);
5800 bool NeedsEnvironment() const OVERRIDE { return true; }
5801 bool CanThrow() const OVERRIDE { return true; }
5803 Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
5804 uint32_t GetFieldIndex() const { return field_index_; }
5806 DECLARE_INSTRUCTION(UnresolvedInstanceFieldSet);
5809 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
5810 static constexpr size_t kFieldFieldTypeSize =
5811 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
// NOTE(review): the constant below says "Static" but this is the
// *instance*-field class — looks like a copy/paste misnomer from
// HUnresolvedStaticFieldSet. Private and only used in the static_assert,
// so harmless, but worth renaming upstream.
5812 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
5813 kFieldFieldType + kFieldFieldTypeSize;
5814 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5815 "Too many packed fields.");
5816 using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
5818 const uint32_t field_index_;
5820 DISALLOW_COPY_AND_ASSIGN(HUnresolvedInstanceFieldSet);
// Reads an unresolved static field; no inputs, always calls the runtime.
5823 class HUnresolvedStaticFieldGet FINAL : public HExpression<0> {
5825 HUnresolvedStaticFieldGet(Primitive::Type field_type,
5826 uint32_t field_index,
5828 : HExpression(field_type, SideEffects::AllExceptGCDependency(), dex_pc),
5829 field_index_(field_index) {
5832 bool NeedsEnvironment() const OVERRIDE { return true; }
5833 bool CanThrow() const OVERRIDE { return true; }
5835 Primitive::Type GetFieldType() const { return GetType(); }
5836 uint32_t GetFieldIndex() const { return field_index_; }
5838 DECLARE_INSTRUCTION(UnresolvedStaticFieldGet);
5841 const uint32_t field_index_;
5843 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldGet);
// Writes an unresolved static field; input 0 is the value to store.
// Field type is kept in the packed bits (see FieldTypeField below).
5846 class HUnresolvedStaticFieldSet FINAL : public HTemplateInstruction<1> {
5848 HUnresolvedStaticFieldSet(HInstruction* value,
5849 Primitive::Type field_type,
5850 uint32_t field_index,
5852 : HTemplateInstruction(SideEffects::AllExceptGCDependency(), dex_pc),
5853 field_index_(field_index) {
5854 SetPackedField<FieldTypeField>(field_type);
5855 DCHECK_EQ(Primitive::PrimitiveKind(field_type), Primitive::PrimitiveKind(value->GetType()));
5856 SetRawInputAt(0, value);
5859 bool NeedsEnvironment() const OVERRIDE { return true; }
5860 bool CanThrow() const OVERRIDE { return true; }
5862 Primitive::Type GetFieldType() const { return GetPackedField<FieldTypeField>(); }
5863 uint32_t GetFieldIndex() const { return field_index_; }
5865 DECLARE_INSTRUCTION(UnresolvedStaticFieldSet);
5868 static constexpr size_t kFieldFieldType = HInstruction::kNumberOfGenericPackedBits;
5869 static constexpr size_t kFieldFieldTypeSize =
5870 MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
5871 static constexpr size_t kNumberOfUnresolvedStaticFieldSetPackedBits =
5872 kFieldFieldType + kFieldFieldTypeSize;
5873 static_assert(kNumberOfUnresolvedStaticFieldSetPackedBits <= HInstruction::kMaxNumberOfPackedBits,
5874 "Too many packed fields.");
5875 using FieldTypeField = BitField<Primitive::Type, kFieldFieldType, kFieldFieldTypeSize>;
5877 const uint32_t field_index_;
5879 DISALLOW_COPY_AND_ASSIGN(HUnresolvedStaticFieldSet);
5882 // Implement the move-exception DEX instruction.
// Yields the pending exception object; never null by construction.
5883 class HLoadException FINAL : public HExpression<0> {
5885 explicit HLoadException(uint32_t dex_pc = kNoDexPc)
5886 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc) {}
5888 bool CanBeNull() const OVERRIDE { return false; }
5890 DECLARE_INSTRUCTION(LoadException);
5893 DISALLOW_COPY_AND_ASSIGN(HLoadException);
5896 // Implicit part of move-exception which clears thread-local exception storage.
5897 // Must not be removed because the runtime expects the TLS to get cleared.
// AllWrites side effects keep optimizations from eliminating or reordering it.
5898 class HClearException FINAL : public HTemplateInstruction<0> {
5900 explicit HClearException(uint32_t dex_pc = kNoDexPc)
5901 : HTemplateInstruction(SideEffects::AllWrites(), dex_pc) {}
5903 DECLARE_INSTRUCTION(ClearException);
5906 DISALLOW_COPY_AND_ASSIGN(HClearException);
// Throws the exception object given as input 0; ends the basic block
// (IsControlFlow) and always needs an environment for the runtime call.
5909 class HThrow FINAL : public HTemplateInstruction<1> {
5911 HThrow(HInstruction* exception, uint32_t dex_pc)
5912 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
5913 SetRawInputAt(0, exception);
5916 bool IsControlFlow() const OVERRIDE { return true; }
5918 bool NeedsEnvironment() const OVERRIDE { return true; }
5920 bool CanThrow() const OVERRIDE { return true; }
5923 DECLARE_INSTRUCTION(Throw);
5926 DISALLOW_COPY_AND_ASSIGN(HThrow);
5930 * Implementation strategies for the code generator of a HInstanceOf
// Shared by HInstanceOf and HCheckCast to pick the codegen strategy.
5933 enum class TypeCheckKind {
5934 kUnresolvedCheck, // Check against an unresolved type.
5935 kExactCheck, // Can do a single class compare.
5936 kClassHierarchyCheck, // Can just walk the super class chain.
5937 kAbstractClassCheck, // Can just walk the super class chain, starting one up.
5938 kInterfaceCheck, // No optimization yet when checking against an interface.
5939 kArrayObjectCheck, // Can just check if the array is not primitive.
5940 kArrayCheck, // No optimization yet when checking against a generic array.
5944 std::ostream& operator<<(std::ostream& os, TypeCheckKind rhs);
// instance-of: yields a boolean. Input 0 is the object, input 1 the class.
// Only an exact check is guaranteed runtime-call-free (see CanCallRuntime).
5946 class HInstanceOf FINAL : public HExpression<2> {
5948 HInstanceOf(HInstruction* object,
5949 HLoadClass* constant,
5950 TypeCheckKind check_kind,
5952 : HExpression(Primitive::kPrimBoolean,
5953 SideEffectsForArchRuntimeCalls(check_kind),
5955 SetPackedField<TypeCheckKindField>(check_kind);
5956 SetPackedFlag<kFlagMustDoNullCheck>(true);
5957 SetRawInputAt(0, object);
5958 SetRawInputAt(1, constant);
5961 bool CanBeMoved() const OVERRIDE { return true; }
5963 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
5967 bool NeedsEnvironment() const OVERRIDE {
5968 return CanCallRuntime(GetTypeCheckKind());
5971 // Used only in code generation.
5972 bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
5973 void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
5974 TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
5975 bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
5977 static bool CanCallRuntime(TypeCheckKind check_kind) {
5978 // Mips currently does runtime calls for any other checks.
5979 return check_kind != TypeCheckKind::kExactCheck;
5982 static SideEffects SideEffectsForArchRuntimeCalls(TypeCheckKind check_kind) {
5983 return CanCallRuntime(check_kind) ? SideEffects::CanTriggerGC() : SideEffects::None();
5986 DECLARE_INSTRUCTION(InstanceOf);
// Packed layout: TypeCheckKind field followed by the must-do-null-check flag.
5989 static constexpr size_t kFieldTypeCheckKind = kNumberOfExpressionPackedBits;
5990 static constexpr size_t kFieldTypeCheckKindSize =
5991 MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
5992 static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
5993 static constexpr size_t kNumberOfInstanceOfPackedBits = kFlagMustDoNullCheck + 1;
5994 static_assert(kNumberOfInstanceOfPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
5995 using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
5997 DISALLOW_COPY_AND_ASSIGN(HInstanceOf);
// Narrows the reference type info of its input (used by reference type
// propagation, e.g. inside an `instanceof`-guarded branch). Pass-through
// value-wise; only the static type/nullability knowledge changes.
6000 class HBoundType FINAL : public HExpression<1> {
6002 HBoundType(HInstruction* input, uint32_t dex_pc = kNoDexPc)
6003 : HExpression(Primitive::kPrimNot, SideEffects::None(), dex_pc),
6004 upper_bound_(ReferenceTypeInfo::CreateInvalid()) {
6005 SetPackedFlag<kFlagUpperCanBeNull>(true);
6006 SetPackedFlag<kFlagCanBeNull>(true);
6007 DCHECK_EQ(input->GetType(), Primitive::kPrimNot);
6008 SetRawInputAt(0, input);
6011 // {Get,Set}Upper* should only be used in reference type propagation.
6012 const ReferenceTypeInfo& GetUpperBound() const { return upper_bound_; }
6013 bool GetUpperCanBeNull() const { return GetPackedFlag<kFlagUpperCanBeNull>(); }
6014 void SetUpperBound(const ReferenceTypeInfo& upper_bound, bool can_be_null);
// can_be_null may only be set if the upper bound still allows null.
6016 void SetCanBeNull(bool can_be_null) {
6017 DCHECK(GetUpperCanBeNull() || !can_be_null);
6018 SetPackedFlag<kFlagCanBeNull>(can_be_null);
6021 bool CanBeNull() const OVERRIDE { return GetPackedFlag<kFlagCanBeNull>(); }
6023 DECLARE_INSTRUCTION(BoundType);
6026 // Represents the top constraint that can_be_null_ cannot exceed (i.e. if this
6027 // is false then CanBeNull() cannot be true).
6028 static constexpr size_t kFlagUpperCanBeNull = kNumberOfExpressionPackedBits;
6029 static constexpr size_t kFlagCanBeNull = kFlagUpperCanBeNull + 1;
6030 static constexpr size_t kNumberOfBoundTypePackedBits = kFlagCanBeNull + 1;
6031 static_assert(kNumberOfBoundTypePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6033 // Encodes the most upper class that this instruction can have. In other words
6034 // it is always the case that GetUpperBound().IsSupertypeOf(GetReferenceType()).
6035 // It is used to bound the type in cases like:
6036 // if (x instanceof ClassX) {
6037 // upper_bound_ will be ClassX
6039 ReferenceTypeInfo upper_bound_;
6041 DISALLOW_COPY_AND_ASSIGN(HBoundType);
// check-cast: throws ClassCastException when input 0 is not an instance of
// the class in input 1. Produces no value (unlike HInstanceOf).
6044 class HCheckCast FINAL : public HTemplateInstruction<2> {
6046 HCheckCast(HInstruction* object,
6047 HLoadClass* constant,
6048 TypeCheckKind check_kind,
6050 : HTemplateInstruction(SideEffects::CanTriggerGC(), dex_pc) {
6051 SetPackedField<TypeCheckKindField>(check_kind);
6052 SetPackedFlag<kFlagMustDoNullCheck>(true);
6053 SetRawInputAt(0, object);
6054 SetRawInputAt(1, constant);
6057 bool CanBeMoved() const OVERRIDE { return true; }
6059 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE {
6063 bool NeedsEnvironment() const OVERRIDE {
6064 // Instruction may throw a CheckCastError.
6068 bool CanThrow() const OVERRIDE { return true; }
// Null-check elision knob used by code generation only.
6070 bool MustDoNullCheck() const { return GetPackedFlag<kFlagMustDoNullCheck>(); }
6071 void ClearMustDoNullCheck() { SetPackedFlag<kFlagMustDoNullCheck>(false); }
6072 TypeCheckKind GetTypeCheckKind() const { return GetPackedField<TypeCheckKindField>(); }
6073 bool IsExactCheck() const { return GetTypeCheckKind() == TypeCheckKind::kExactCheck; }
6075 DECLARE_INSTRUCTION(CheckCast);
6078 static constexpr size_t kFieldTypeCheckKind = kNumberOfGenericPackedBits;
6079 static constexpr size_t kFieldTypeCheckKindSize =
6080 MinimumBitsToStore(static_cast<size_t>(TypeCheckKind::kLast));
6081 static constexpr size_t kFlagMustDoNullCheck = kFieldTypeCheckKind + kFieldTypeCheckKindSize;
6082 static constexpr size_t kNumberOfCheckCastPackedBits = kFlagMustDoNullCheck + 1;
6083 static_assert(kNumberOfCheckCastPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
6084 using TypeCheckKindField = BitField<TypeCheckKind, kFieldTypeCheckKind, kFieldTypeCheckKindSize>;
6086 DISALLOW_COPY_AND_ASSIGN(HCheckCast);
// Emits an explicit memory barrier of the given kind; the blanket
// write/read side effects keep it from being moved or eliminated.
6089 class HMemoryBarrier FINAL : public HTemplateInstruction<0> {
6091 explicit HMemoryBarrier(MemBarrierKind barrier_kind, uint32_t dex_pc = kNoDexPc)
6092 : HTemplateInstruction(
6093 SideEffects::AllWritesAndReads(), dex_pc) { // Assume write/read on all fields/arrays.
6094 SetPackedField<BarrierKindField>(barrier_kind);
6097 MemBarrierKind GetBarrierKind() { return GetPackedField<BarrierKindField>(); }
6099 DECLARE_INSTRUCTION(MemoryBarrier);
6102 static constexpr size_t kFieldBarrierKind = HInstruction::kNumberOfGenericPackedBits;
6103 static constexpr size_t kFieldBarrierKindSize =
6104 MinimumBitsToStore(static_cast<size_t>(kLastBarrierKind));
6105 static constexpr size_t kNumberOfMemoryBarrierPackedBits =
6106 kFieldBarrierKind + kFieldBarrierKindSize;
6107 static_assert(kNumberOfMemoryBarrierPackedBits <= kMaxNumberOfPackedBits,
6108 "Too many packed fields.");
6109 using BarrierKindField = BitField<MemBarrierKind, kFieldBarrierKind, kFieldBarrierKindSize>;
6111 DISALLOW_COPY_AND_ASSIGN(HMemoryBarrier);
// monitor-enter / monitor-exit on the object given as input 0.
6114 class HMonitorOperation FINAL : public HTemplateInstruction<1> {
6116 enum class OperationKind {
6122 HMonitorOperation(HInstruction* object, OperationKind kind, uint32_t dex_pc)
6123 : HTemplateInstruction(
6124 SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
6126 SetPackedField<OperationKindField>(kind);
6127 SetRawInputAt(0, object);
6130 // Instruction may go into runtime, so we need an environment.
6131 bool NeedsEnvironment() const OVERRIDE { return true; }
6133 bool CanThrow() const OVERRIDE {
6134 // Verifier guarantees that monitor-exit cannot throw.
6135 // This is important because it allows the HGraphBuilder to remove
6136 // a dead throw-catch loop generated for `synchronized` blocks/methods.
6140 OperationKind GetOperationKind() const { return GetPackedField<OperationKindField>(); }
6141 bool IsEnter() const { return GetOperationKind() == OperationKind::kEnter; }
6143 DECLARE_INSTRUCTION(MonitorOperation);
6146 static constexpr size_t kFieldOperationKind = HInstruction::kNumberOfGenericPackedBits;
6147 static constexpr size_t kFieldOperationKindSize =
6148 MinimumBitsToStore(static_cast<size_t>(OperationKind::kLast));
6149 static constexpr size_t kNumberOfMonitorOperationPackedBits =
6150 kFieldOperationKind + kFieldOperationKindSize;
6151 static_assert(kNumberOfMonitorOperationPackedBits <= HInstruction::kMaxNumberOfPackedBits,
6152 "Too many packed fields.");
6153 using OperationKindField = BitField<OperationKind, kFieldOperationKind, kFieldOperationKindSize>;
6156 DISALLOW_COPY_AND_ASSIGN(HMonitorOperation);
// Ternary select: value is true_value when condition holds, else false_value.
// Inputs are ordered (false, true, condition) — see the comment below.
6159 class HSelect FINAL : public HExpression<3> {
6161 HSelect(HInstruction* condition,
6162 HInstruction* true_value,
6163 HInstruction* false_value,
6165 : HExpression(HPhi::ToPhiType(true_value->GetType()), SideEffects::None(), dex_pc) {
6166 DCHECK_EQ(HPhi::ToPhiType(true_value->GetType()), HPhi::ToPhiType(false_value->GetType()));
6168 // First input must be `true_value` or `false_value` to allow codegens to
6169 // use the SameAsFirstInput allocation policy. We make it `false_value`, so
6170 // that architectures which implement HSelect as a conditional move also
6171 // will not need to invert the condition.
6172 SetRawInputAt(0, false_value);
6173 SetRawInputAt(1, true_value);
6174 SetRawInputAt(2, condition);
6177 HInstruction* GetFalseValue() const { return InputAt(0); }
6178 HInstruction* GetTrueValue() const { return InputAt(1); }
6179 HInstruction* GetCondition() const { return InputAt(2); }
6181 bool CanBeMoved() const OVERRIDE { return true; }
6182 bool InstructionDataEquals(HInstruction* other ATTRIBUTE_UNUSED) const OVERRIDE { return true; }
// Null is possible whenever either arm may produce null.
6184 bool CanBeNull() const OVERRIDE {
6185 return GetTrueValue()->CanBeNull() || GetFalseValue()->CanBeNull();
6188 DECLARE_INSTRUCTION(Select);
6191 DISALLOW_COPY_AND_ASSIGN(HSelect);
// One source->destination move inside an HParallelMove. The parallel move
// resolver mutates these in place (pending/eliminated states below).
6194 class MoveOperands : public ArenaObject<kArenaAllocMoveOperands> {
6196 MoveOperands(Location source,
6197 Location destination,
6198 Primitive::Type type,
6199 HInstruction* instruction)
6200 : source_(source), destination_(destination), type_(type), instruction_(instruction) {}
6202 Location GetSource() const { return source_; }
6203 Location GetDestination() const { return destination_; }
6205 void SetSource(Location value) { source_ = value; }
6206 void SetDestination(Location value) { destination_ = value; }
6208 // The parallel move resolver marks moves as "in-progress" by clearing the
6209 // destination (but not the source).
6210 Location MarkPending() {
6211 DCHECK(!IsPending());
6212 Location dest = destination_;
6213 destination_ = Location::NoLocation();
// Restores the destination saved by MarkPending().
6217 void ClearPending(Location dest) {
6218 DCHECK(IsPending());
6219 destination_ = dest;
6222 bool IsPending() const {
6223 DCHECK(source_.IsValid() || destination_.IsInvalid());
6224 return destination_.IsInvalid() && source_.IsValid();
6227 // True if this blocks a move from the given location.
6228 bool Blocks(Location loc) const {
6229 return !IsEliminated() && source_.OverlapsWith(loc);
6232 // A move is redundant if it's been eliminated, if its source and
6233 // destination are the same, or if its destination is unneeded.
6234 bool IsRedundant() const {
6235 return IsEliminated() || destination_.IsInvalid() || source_.Equals(destination_);
6238 // We clear both operands to indicate move that's been eliminated.
6240 source_ = destination_ = Location::NoLocation();
6243 bool IsEliminated() const {
6244 DCHECK(!source_.IsInvalid() || destination_.IsInvalid());
6245 return source_.IsInvalid();
6248 Primitive::Type GetType() const { return type_; }
6250 bool Is64BitMove() const {
6251 return Primitive::Is64BitType(type_);
6254 HInstruction* GetInstruction() const { return instruction_; }
6258 Location destination_;
6259 // The type this move is for.
6260 Primitive::Type type_;
6261 // The instruction this move is associated with. Null when this move is
6262 // for moving an input in the expected locations of user (including a phi user).
6263 // This is only used in debug mode, to ensure we do not connect interval siblings
6264 // in the same parallel move.
6265 HInstruction* instruction_;
6268 std::ostream& operator<<(std::ostream& os, const MoveOperands& rhs);
6270 static constexpr size_t kDefaultNumberOfMoves = 4;
// A set of moves executed "in parallel" (register shuffles inserted by the
// register allocator); AddMove DCHECKs that destinations never overlap.
6272 class HParallelMove FINAL : public HTemplateInstruction<0> {
6274 explicit HParallelMove(ArenaAllocator* arena, uint32_t dex_pc = kNoDexPc)
6275 : HTemplateInstruction(SideEffects::None(), dex_pc),
6276 moves_(arena->Adapter(kArenaAllocMoveOperands)) {
6277 moves_.reserve(kDefaultNumberOfMoves);
6280 void AddMove(Location source,
6281 Location destination,
6282 Primitive::Type type,
6283 HInstruction* instruction) {
6284 DCHECK(source.IsValid());
6285 DCHECK(destination.IsValid());
// Debug-only sanity checks: at most one move per instruction (except the
// spill-slot special case), and no two moves may target overlapping
// destinations within the same parallel move.
6286 if (kIsDebugBuild) {
6287 if (instruction != nullptr) {
6288 for (const MoveOperands& move : moves_) {
6289 if (move.GetInstruction() == instruction) {
6290 // Special case the situation where the move is for the spill slot
6291 // of the instruction.
6292 if ((GetPrevious() == instruction)
6293 || ((GetPrevious() == nullptr)
6294 && instruction->IsPhi()
6295 && instruction->GetBlock() == GetBlock())) {
6296 DCHECK_NE(destination.GetKind(), move.GetDestination().GetKind())
6297 << "Doing parallel moves for the same instruction.";
6299 DCHECK(false) << "Doing parallel moves for the same instruction.";
6304 for (const MoveOperands& move : moves_) {
6305 DCHECK(!destination.OverlapsWith(move.GetDestination()))
6306 << "Overlapped destination for two moves in a parallel move: "
6307 << move.GetSource() << " ==> " << move.GetDestination() << " and "
6308 << source << " ==> " << destination;
6311 moves_.emplace_back(source, destination, type, instruction);
6314 MoveOperands* MoveOperandsAt(size_t index) {
6315 return &moves_[index];
6318 size_t NumMoves() const { return moves_.size(); }
6320 DECLARE_INSTRUCTION(ParallelMove);
6323 ArenaVector<MoveOperands> moves_;
6325 DISALLOW_COPY_AND_ASSIGN(HParallelMove);
6330 #if defined(ART_ENABLE_CODEGEN_arm) || defined(ART_ENABLE_CODEGEN_arm64)
6331 #include "nodes_shared.h"
6333 #ifdef ART_ENABLE_CODEGEN_arm
6334 #include "nodes_arm.h"
6336 #ifdef ART_ENABLE_CODEGEN_arm64
6337 #include "nodes_arm64.h"
6339 #ifdef ART_ENABLE_CODEGEN_x86
6340 #include "nodes_x86.h"
// Base visitor over all HIR instructions. The macro below generates one
// virtual Visit<Name> per instruction kind, defaulting to VisitInstruction.
6345 class HGraphVisitor : public ValueObject {
6347 explicit HGraphVisitor(HGraph* graph) : graph_(graph) {}
6348 virtual ~HGraphVisitor() {}
6350 virtual void VisitInstruction(HInstruction* instruction ATTRIBUTE_UNUSED) {}
6351 virtual void VisitBasicBlock(HBasicBlock* block);
6353 // Visit the graph following basic block insertion order.
6354 void VisitInsertionOrder();
6356 // Visit the graph following dominator tree reverse post-order.
6357 void VisitReversePostOrder();
6359 HGraph* GetGraph() const { return graph_; }
6361 // Visit functions for instruction classes.
6362 #define DECLARE_VISIT_INSTRUCTION(name, super) \
6363 virtual void Visit##name(H##name* instr) { VisitInstruction(instr); }
6365 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
6367 #undef DECLARE_VISIT_INSTRUCTION
6370 HGraph* const graph_;
6372 DISALLOW_COPY_AND_ASSIGN(HGraphVisitor);
// Like HGraphVisitor, but unhandled instruction kinds delegate to their
// superclass's visit method instead of directly to VisitInstruction.
6375 class HGraphDelegateVisitor : public HGraphVisitor {
6377 explicit HGraphDelegateVisitor(HGraph* graph) : HGraphVisitor(graph) {}
6378 virtual ~HGraphDelegateVisitor() {}
6380 // Visit functions that delegate to the super class.
6381 #define DECLARE_VISIT_INSTRUCTION(name, super) \
6382 void Visit##name(H##name* instr) OVERRIDE { Visit##super(instr); }
6384 FOR_EACH_INSTRUCTION(DECLARE_VISIT_INSTRUCTION)
6386 #undef DECLARE_VISIT_INSTRUCTION
6389 DISALLOW_COPY_AND_ASSIGN(HGraphDelegateVisitor);
// Iterates basic blocks in the order they were inserted into the graph.
6392 class HInsertionOrderIterator : public ValueObject {
6394 explicit HInsertionOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {}
6396 bool Done() const { return index_ == graph_.GetBlocks().size(); }
6397 HBasicBlock* Current() const { return graph_.GetBlocks()[index_]; }
6398 void Advance() { ++index_; }
6401 const HGraph& graph_;
6404 DISALLOW_COPY_AND_ASSIGN(HInsertionOrderIterator);
// Iterates basic blocks in reverse post-order (dominators before dominated).
6407 class HReversePostOrderIterator : public ValueObject {
6409 explicit HReversePostOrderIterator(const HGraph& graph) : graph_(graph), index_(0) {
6410 // Check that reverse post order of the graph has been built.
6411 DCHECK(!graph.GetReversePostOrder().empty());
6414 bool Done() const { return index_ == graph_.GetReversePostOrder().size(); }
6415 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_]; }
6416 void Advance() { ++index_; }
6419 const HGraph& graph_;
6422 DISALLOW_COPY_AND_ASSIGN(HReversePostOrderIterator);
// Iterates basic blocks in post-order by walking the RPO vector backwards.
6425 class HPostOrderIterator : public ValueObject {
6427 explicit HPostOrderIterator(const HGraph& graph)
6428 : graph_(graph), index_(graph_.GetReversePostOrder().size()) {
6429 // Check that reverse post order of the graph has been built.
6430 DCHECK(!graph.GetReversePostOrder().empty());
6433 bool Done() const { return index_ == 0; }
6434 HBasicBlock* Current() const { return graph_.GetReversePostOrder()[index_ - 1u]; }
6435 void Advance() { --index_; }
6438 const HGraph& graph_;
6441 DISALLOW_COPY_AND_ASSIGN(HPostOrderIterator);
// Iterates the graph's linear order backwards (linear post-order).
6444 class HLinearPostOrderIterator : public ValueObject {
6446 explicit HLinearPostOrderIterator(const HGraph& graph)
6447 : order_(graph.GetLinearOrder()), index_(graph.GetLinearOrder().size()) {}
6449 bool Done() const { return index_ == 0; }
6451 HBasicBlock* Current() const { return order_[index_ - 1u]; }
6455 DCHECK_GE(index_, 0U);
6459 const ArenaVector<HBasicBlock*>& order_;
6462 DISALLOW_COPY_AND_ASSIGN(HLinearPostOrderIterator);
// Iterates the graph's linear order front to back.
6465 class HLinearOrderIterator : public ValueObject {
6467 explicit HLinearOrderIterator(const HGraph& graph)
6468 : order_(graph.GetLinearOrder()), index_(0) {}
6470 bool Done() const { return index_ == order_.size(); }
6471 HBasicBlock* Current() const { return order_[index_]; }
6472 void Advance() { ++index_; }
6475 const ArenaVector<HBasicBlock*>& order_;
6478 DISALLOW_COPY_AND_ASSIGN(HLinearOrderIterator);
6481 // Iterator over the blocks that are part of the loop. Includes blocks part
6482 // of an inner loop. The order in which the blocks are iterated is on their
// block id, scanning the graph's block vector and filtering by the loop's
// block bit vector.
6484 class HBlocksInLoopIterator : public ValueObject {
6486 explicit HBlocksInLoopIterator(const HLoopInformation& info)
6487 : blocks_in_loop_(info.GetBlocks()),
6488 blocks_(info.GetHeader()->GetGraph()->GetBlocks()),
// Skip forward if the first block is not in the loop.
6490 if (!blocks_in_loop_.IsBitSet(index_)) {
6495 bool Done() const { return index_ == blocks_.size(); }
6496 HBasicBlock* Current() const { return blocks_[index_]; }
// Advance to the next block whose id is set in the loop's bit vector.
6499 for (size_t e = blocks_.size(); index_ < e; ++index_) {
6500 if (blocks_in_loop_.IsBitSet(index_)) {
6507 const BitVector& blocks_in_loop_;
6508 const ArenaVector<HBasicBlock*>& blocks_;
6511 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopIterator);
6514 // Iterator over the blocks that are part of the loop. Includes blocks part
6515 // of an inner loop. The order in which the blocks are iterated is reverse
// post-order, filtering the graph's RPO vector by the loop's block bit vector.
6517 class HBlocksInLoopReversePostOrderIterator : public ValueObject {
6519 explicit HBlocksInLoopReversePostOrderIterator(const HLoopInformation& info)
6520 : blocks_in_loop_(info.GetBlocks()),
6521 blocks_(info.GetHeader()->GetGraph()->GetReversePostOrder()),
// Skip forward if the first RPO block is not in the loop.
6523 if (!blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6528 bool Done() const { return index_ == blocks_.size(); }
6529 HBasicBlock* Current() const { return blocks_[index_]; }
// Advance to the next RPO block whose id is set in the loop's bit vector.
6532 for (size_t e = blocks_.size(); index_ < e; ++index_) {
6533 if (blocks_in_loop_.IsBitSet(blocks_[index_]->GetBlockId())) {
6540 const BitVector& blocks_in_loop_;
6541 const ArenaVector<HBasicBlock*>& blocks_;
6544 DISALLOW_COPY_AND_ASSIGN(HBlocksInLoopReversePostOrderIterator);
// Extracts the int64 value of an int/long constant; any other constant must
// be the null constant (DCHECKed; the null case presumably yields 0 on a
// line outside this view — TODO confirm).
6547 inline int64_t Int64FromConstant(HConstant* constant) {
6548 if (constant->IsIntConstant()) {
6549 return constant->AsIntConstant()->GetValue();
6550 } else if (constant->IsLongConstant()) {
6551 return constant->AsLongConstant()->GetValue();
6553 DCHECK(constant->IsNullConstant()) << constant->DebugName();
// Identity (not content) comparison of dex files — see rationale below.
6558 inline bool IsSameDexFile(const DexFile& lhs, const DexFile& rhs) {
6559 // For the purposes of the compiler, the dex files must actually be the same object
6560 // if we want to safely treat them as the same. This is especially important for JIT
6561 // as custom class loaders can open the same underlying file (or memory) multiple
6562 // times and provide different class resolution but no two class loaders should ever
6563 // use the same DexFile object - doing so is an unsupported hack that can lead to
6564 // all sorts of weird failures.
6565 return &lhs == &rhs;
// Generates HInstruction::Is<T>() and As<T>() (const and non-const) for every
// concrete instruction kind. NOTE(review): the const overload uses down_cast
// while the non-const one uses static_cast — presumably intentional (down_cast
// DCHECKs in debug builds), but worth confirming the asymmetry upstream.
6568 #define INSTRUCTION_TYPE_CHECK(type, super) \
6569 inline bool HInstruction::Is##type() const { return GetKind() == k##type; } \
6570 inline const H##type* HInstruction::As##type() const { \
6571 return Is##type() ? down_cast<const H##type*>(this) : nullptr; \
6573 inline H##type* HInstruction::As##type() { \
6574 return Is##type() ? static_cast<H##type*>(this) : nullptr; \
6577 FOR_EACH_CONCRETE_INSTRUCTION(INSTRUCTION_TYPE_CHECK)
6578 #undef INSTRUCTION_TYPE_CHECK
6580 // Create space in `blocks` for adding `number_of_new_blocks` entries
6581 // starting at location `after`. Blocks after `after` are moved accordingly.
// (Comment fixed: the parameter is named `after`, not `at` — see the DCHECK.)
6582 inline void MakeRoomFor(ArenaVector<HBasicBlock*>* blocks,
6583 size_t number_of_new_blocks,
6585 DCHECK_LT(after, blocks->size());
6586 size_t old_size = blocks->size();
6587 size_t new_size = old_size + number_of_new_blocks;
// Grow, then shift the tail [after+1, old_size) to the end of the vector,
// leaving a gap of number_of_new_blocks default-initialized slots.
6588 blocks->resize(new_size);
6589 std::copy_backward(blocks->begin() + after + 1u, blocks->begin() + old_size, blocks->end());
6594 #endif // ART_COMPILER_OPTIMIZING_NODES_H_