From 88f288e3564d79d87c0cd8bb831ec5a791ba4861 Mon Sep 17 00:00:00 2001 From: Nicolas Geoffray Date: Wed, 29 Jun 2016 08:17:52 +0000 Subject: [PATCH] Revert "Optimize IMT" Bug: 29188168 (for initial CL) Bug: 29778499 (reason for revert) This reverts commit badee9820fcf5dca5f8c46c3215ae1779ee7736e. Change-Id: I32b8463122c3521e233c34ca95c96a5078e88848 --- compiler/driver/compiler_driver.cc | 21 +-- compiler/image_writer.cc | 65 +-------- compiler/image_writer.h | 6 - compiler/optimizing/code_generator_arm.cc | 15 +-- compiler/optimizing/code_generator_arm64.cc | 12 +- compiler/optimizing/code_generator_mips.cc | 14 +- compiler/optimizing/code_generator_mips64.cc | 6 +- compiler/optimizing/code_generator_x86.cc | 15 +-- compiler/optimizing/code_generator_x86_64.cc | 15 +-- compiler/optimizing/inliner.cc | 4 +- patchoat/patchoat.cc | 12 -- patchoat/patchoat.h | 1 - runtime/art_method.h | 16 --- runtime/class_linker.cc | 150 +++++---------------- runtime/class_linker.h | 23 ++-- runtime/class_linker_test.cc | 3 +- runtime/entrypoints/entrypoint_utils-inl.h | 7 +- .../quick/quick_trampoline_entrypoints.cc | 13 +- runtime/gc/space/image_space.cc | 4 - runtime/image-inl.h | 19 --- runtime/image.cc | 2 +- runtime/image.h | 6 - runtime/imtable.h | 77 ----------- runtime/interpreter/interpreter_common.h | 2 +- runtime/mirror/class-inl.h | 74 ++++++---- runtime/mirror/class.cc | 17 ++- runtime/mirror/class.h | 51 ++++--- 27 files changed, 179 insertions(+), 471 deletions(-) delete mode 100644 runtime/imtable.h diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc index 131be37a3..a4b48892f 100644 --- a/compiler/driver/compiler_driver.cc +++ b/compiler/driver/compiler_driver.cc @@ -2522,28 +2522,11 @@ class InitializeArrayClassesAndCreateConflictTablesVisitor : public ClassVisitor true); } // Create the conflict tables. - FillIMTAndConflictTables(klass); - return true; - } - - private: - void FillIMTAndConflictTables(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_) { - if (!klass->ShouldHaveImt()) { - return; - } - if (visited_classes_.find(klass) != visited_classes_.end()) { - return; - } - if (klass->HasSuperClass()) { - FillIMTAndConflictTables(klass->GetSuperClass()); - } - if (!klass->IsTemp()) { + if (!klass->IsTemp() && klass->ShouldHaveEmbeddedImtAndVTable()) { Runtime::Current()->GetClassLinker()->FillIMTAndConflictTables(klass); } - visited_classes_.insert(klass); + return true; } - - std::set visited_classes_; }; void CompilerDriver::InitializeClasses(jobject class_loader, diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc index 063eb1171..da1056847 100644 --- a/compiler/image_writer.cc +++ b/compiler/image_writer.cc @@ -1232,10 +1232,9 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) { } // Assign offsets for all runtime methods in the IMT since these may hold conflict tables // live. 
- if (as_klass->ShouldHaveImt()) { - ImTable* imt = as_klass->GetImt(target_ptr_size_); - for (size_t i = 0; i < ImTable::kSize; ++i) { - ArtMethod* imt_method = imt->Get(i, target_ptr_size_); + if (as_klass->ShouldHaveEmbeddedImtAndVTable()) { + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { + ArtMethod* imt_method = as_klass->GetEmbeddedImTableEntry(i, target_ptr_size_); DCHECK(imt_method != nullptr); if (imt_method->IsRuntimeMethod() && !IsInBootImage(imt_method) && @@ -1244,11 +1243,6 @@ void ImageWriter::WalkFieldsInOrder(mirror::Object* obj) { } } } - - if (as_klass->ShouldHaveImt()) { - ImTable* imt = as_klass->GetImt(target_ptr_size_); - TryAssignImTableOffset(imt, oat_index); - } } else if (h_obj->IsObjectArray()) { // Walk elements of an object array. int32_t length = h_obj->AsObjectArray()->GetLength(); @@ -1275,23 +1269,6 @@ bool ImageWriter::NativeRelocationAssigned(void* ptr) const { return native_object_relocations_.find(ptr) != native_object_relocations_.end(); } -void ImageWriter::TryAssignImTableOffset(ImTable* imt, size_t oat_index) { - // No offset, or already assigned. - if (imt == nullptr || IsInBootImage(imt) || NativeRelocationAssigned(imt)) { - return; - } - // If the method is a conflict method we also want to assign the conflict table offset. - ImageInfo& image_info = GetImageInfo(oat_index); - const size_t size = ImTable::SizeInBytes(target_ptr_size_); - native_object_relocations_.emplace( - imt, - NativeObjectRelocation { - oat_index, - image_info.bin_slot_sizes_[kBinImTable], - kNativeObjectRelocationTypeIMTable}); - image_info.bin_slot_sizes_[kBinImTable] += size; -} - void ImageWriter::TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) { // No offset, or already assigned. if (table == nullptr || NativeRelocationAssigned(table)) { @@ -1414,7 +1391,6 @@ void ImageWriter::CalculateNewObjectOffsets() { bin_offset = RoundUp(bin_offset, method_alignment); break; } - case kBinImTable: case kBinIMTConflictTable: { bin_offset = RoundUp(bin_offset, target_ptr_size_); break; @@ -1485,10 +1461,6 @@ size_t ImageWriter::ImageInfo::CreateImageSections(ImageSection* out_sections) c bin_slot_offsets_[kBinArtMethodClean], bin_slot_sizes_[kBinArtMethodClean] + bin_slot_sizes_[kBinArtMethodDirty]); - // IMT section. - ImageSection* imt_section = &out_sections[ImageHeader::kSectionImTables]; - *imt_section = ImageSection(bin_slot_offsets_[kBinImTable], bin_slot_sizes_[kBinImTable]); - // Conflict tables section. ImageSection* imt_conflict_tables_section = &out_sections[ImageHeader::kSectionIMTConflictTables]; *imt_conflict_tables_section = ImageSection(bin_slot_offsets_[kBinIMTConflictTable], @@ -1613,13 +1585,6 @@ class FixupRootVisitor : public RootVisitor { ImageWriter* const image_writer_; }; -void ImageWriter::CopyAndFixupImTable(ImTable* orig, ImTable* copy) { - for (size_t i = 0; i < ImTable::kSize; ++i) { - ArtMethod* method = orig->Get(i, target_ptr_size_); - copy->Set(i, NativeLocationInImage(method), target_ptr_size_); - } -} - void ImageWriter::CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) { const size_t count = orig->NumEntries(target_ptr_size_); for (size_t i = 0; i < count; ++i) { @@ -1677,12 +1642,6 @@ void ImageWriter::CopyAndFixupNativeData(size_t oat_index) { case kNativeObjectRelocationTypeDexCacheArray: // Nothing to copy here, everything is done in FixupDexCache(). 
break; - case kNativeObjectRelocationTypeIMTable: { - ImTable* orig_imt = reinterpret_cast(pair.first); - ImTable* dest_imt = reinterpret_cast(dest); - CopyAndFixupImTable(orig_imt, dest_imt); - break; - } case kNativeObjectRelocationTypeIMTConflictTable: { auto* orig_table = reinterpret_cast(pair.first); CopyAndFixupImtConflictTable( @@ -1891,25 +1850,13 @@ uintptr_t ImageWriter::NativeOffsetInImage(void* obj) { } template -std::string PrettyPrint(T* ptr) SHARED_REQUIRES(Locks::mutator_lock_) { - std::ostringstream oss; - oss << ptr; - return oss.str(); -} - -template <> -std::string PrettyPrint(ArtMethod* method) SHARED_REQUIRES(Locks::mutator_lock_) { - return PrettyMethod(method); -} - -template T* ImageWriter::NativeLocationInImage(T* obj) { if (obj == nullptr || IsInBootImage(obj)) { return obj; } else { auto it = native_object_relocations_.find(obj); - CHECK(it != native_object_relocations_.end()) << obj << " " << PrettyPrint(obj) - << " spaces " << Runtime::Current()->GetHeap()->DumpSpaces(); + CHECK(it != native_object_relocations_.end()) << obj << " spaces " + << Runtime::Current()->GetHeap()->DumpSpaces(); const NativeObjectRelocation& relocation = it->second; ImageInfo& image_info = GetImageInfo(relocation.oat_index); return reinterpret_cast(image_info.image_begin_ + relocation.offset); @@ -2263,8 +2210,6 @@ ImageWriter::Bin ImageWriter::BinTypeForNativeRelocationType(NativeObjectRelocat return kBinDexCacheArray; case kNativeObjectRelocationTypeRuntimeMethod: return kBinRuntimeMethod; - case kNativeObjectRelocationTypeIMTable: - return kBinImTable; case kNativeObjectRelocationTypeIMTConflictTable: return kBinIMTConflictTable; } diff --git a/compiler/image_writer.h b/compiler/image_writer.h index 1efdc22c0..51976c511 100644 --- a/compiler/image_writer.h +++ b/compiler/image_writer.h @@ -169,8 +169,6 @@ class ImageWriter FINAL { // ArtMethods may be dirty if the class has native methods or a declaring class that isn't // initialized. kBinArtMethodDirty, - // IMT (clean) - kBinImTable, // Conflict tables (clean). kBinIMTConflictTable, // Runtime methods (always clean, do not have a length prefix array). @@ -193,7 +191,6 @@ class ImageWriter FINAL { kNativeObjectRelocationTypeArtMethodDirty, kNativeObjectRelocationTypeArtMethodArrayDirty, kNativeObjectRelocationTypeRuntimeMethod, - kNativeObjectRelocationTypeIMTable, kNativeObjectRelocationTypeIMTConflictTable, kNativeObjectRelocationTypeDexCacheArray, }; @@ -404,7 +401,6 @@ class ImageWriter FINAL { void CopyAndFixupObject(mirror::Object* obj) SHARED_REQUIRES(Locks::mutator_lock_); void CopyAndFixupMethod(ArtMethod* orig, ArtMethod* copy, const ImageInfo& image_info) SHARED_REQUIRES(Locks::mutator_lock_); - void CopyAndFixupImTable(ImTable* orig, ImTable* copy) SHARED_REQUIRES(Locks::mutator_lock_); void CopyAndFixupImtConflictTable(ImtConflictTable* orig, ImtConflictTable* copy) SHARED_REQUIRES(Locks::mutator_lock_); void FixupClass(mirror::Class* orig, mirror::Class* copy) @@ -437,8 +433,6 @@ class ImageWriter FINAL { size_t oat_index) SHARED_REQUIRES(Locks::mutator_lock_); - void TryAssignImTableOffset(ImTable* imt, size_t oat_index) SHARED_REQUIRES(Locks::mutator_lock_); - // Assign the offset for an IMT conflict table. Does nothing if the table already has a native // relocation. 
void TryAssignConflictTableOffset(ImtConflictTable* table, size_t oat_index) diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index 663c68a17..5316d59bf 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -1889,6 +1889,8 @@ void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) LocationSummary* locations = invoke->GetLocations(); Register temp = locations->GetTemp(0).AsRegister(); Register hidden_reg = locations->GetTemp(1).AsRegister(); + uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset( + invoke->GetImtIndex() % mirror::Class::kImtSize, kArmPointerSize).Uint32Value(); Location receiver = locations->InAt(0); uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); @@ -1914,14 +1916,10 @@ void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) // intact/accessible until the end of the marking phase (the // concurrent copying collector may not in the future). __ MaybeUnpoisonHeapReference(temp); - __ LoadFromOffset(kLoadWord, temp, temp, - mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value()); - uint32_t method_offset = static_cast(ImTable::OffsetOfElement( - invoke->GetImtIndex() % ImTable::kSize, kArmPointerSize)); // temp = temp->GetImtEntryAt(method_offset); - __ LoadFromOffset(kLoadWord, temp, temp, method_offset); uint32_t entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmWordSize).Int32Value(); + __ LoadFromOffset(kLoadWord, temp, temp, method_offset); // LR = temp->GetEntryPoint(); __ LoadFromOffset(kLoadWord, LR, temp, entry_point); // LR(); @@ -6961,11 +6959,8 @@ void InstructionCodeGeneratorARM::VisitClassTableGet(HClassTableGet* instruction method_offset = mirror::Class::EmbeddedVTableEntryOffset( instruction->GetIndex(), kArmPointerSize).SizeValue(); } else { - __ LoadFromOffset(kLoadWord, locations->Out().AsRegister(), - locations->InAt(0).AsRegister(), - mirror::Class::ImtPtrOffset(kArmPointerSize).Uint32Value()); - method_offset = static_cast(ImTable::OffsetOfElement( - instruction->GetIndex() % ImTable::kSize, kArmPointerSize)); + method_offset = mirror::Class::EmbeddedImTableEntryOffset( + instruction->GetIndex() % mirror::Class::kImtSize, kArmPointerSize).Uint32Value(); } __ LoadFromOffset(kLoadWord, locations->Out().AsRegister(), diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index c8d33d574..fc2c2c34a 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -3506,6 +3506,8 @@ void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invok // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError. LocationSummary* locations = invoke->GetLocations(); Register temp = XRegisterFrom(locations->GetTemp(0)); + uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset( + invoke->GetImtIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value(); Location receiver = locations->InAt(0); Offset class_offset = mirror::Object::ClassOffset(); Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64WordSize); @@ -3535,10 +3537,6 @@ void InstructionCodeGeneratorARM64::VisitInvokeInterface(HInvokeInterface* invok // intact/accessible until the end of the marking phase (the // concurrent copying collector may not in the future). 
GetAssembler()->MaybeUnpoisonHeapReference(temp.W()); - __ Ldr(temp, - MemOperand(temp, mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value())); - uint32_t method_offset = static_cast(ImTable::OffsetOfElement( - invoke->GetImtIndex() % ImTable::kSize, kArm64PointerSize)); // temp = temp->GetImtEntryAt(method_offset); __ Ldr(temp, MemOperand(temp, method_offset)); // lr = temp->GetEntryPoint(); @@ -5355,10 +5353,8 @@ void InstructionCodeGeneratorARM64::VisitClassTableGet(HClassTableGet* instructi method_offset = mirror::Class::EmbeddedVTableEntryOffset( instruction->GetIndex(), kArm64PointerSize).SizeValue(); } else { - __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)), - mirror::Class::ImtPtrOffset(kArm64PointerSize).Uint32Value())); - method_offset = static_cast(ImTable::OffsetOfElement( - instruction->GetIndex() % ImTable::kSize, kArm64PointerSize)); + method_offset = mirror::Class::EmbeddedImTableEntryOffset( + instruction->GetIndex() % mirror::Class::kImtSize, kArm64PointerSize).Uint32Value(); } __ Ldr(XRegisterFrom(locations->Out()), MemOperand(XRegisterFrom(locations->InAt(0)), method_offset)); diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc index 810db2088..4d44c18dc 100644 --- a/compiler/optimizing/code_generator_mips.cc +++ b/compiler/optimizing/code_generator_mips.cc @@ -3698,6 +3698,8 @@ void LocationsBuilderMIPS::VisitInvokeInterface(HInvokeInterface* invoke) { void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke) { // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError. Register temp = invoke->GetLocations()->GetTemp(0).AsRegister(); + uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset( + invoke->GetImtIndex() % mirror::Class::kImtSize, kMipsPointerSize).Uint32Value(); Location receiver = invoke->GetLocations()->InAt(0); uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMipsWordSize); @@ -3714,10 +3716,6 @@ void InstructionCodeGeneratorMIPS::VisitInvokeInterface(HInvokeInterface* invoke __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister(), class_offset); } codegen_->MaybeRecordImplicitNullCheck(invoke); - __ LoadFromOffset(kLoadWord, temp, temp, - mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value()); - uint32_t method_offset = static_cast(ImTable::OffsetOfElement( - invoke->GetImtIndex() % ImTable::kSize, kMipsPointerSize)); // temp = temp->GetImtEntryAt(method_offset); __ LoadFromOffset(kLoadWord, temp, temp, method_offset); // T9 = temp->GetEntryPoint(); @@ -5167,12 +5165,8 @@ void InstructionCodeGeneratorMIPS::VisitClassTableGet(HClassTableGet* instructio method_offset = mirror::Class::EmbeddedVTableEntryOffset( instruction->GetIndex(), kMipsPointerSize).SizeValue(); } else { - __ LoadFromOffset(kLoadWord, - locations->Out().AsRegister(), - locations->InAt(0).AsRegister(), - mirror::Class::ImtPtrOffset(kMipsPointerSize).Uint32Value()); - method_offset = static_cast(ImTable::OffsetOfElement( - instruction->GetIndex() % ImTable::kSize, kMipsPointerSize)); + method_offset = mirror::Class::EmbeddedImTableEntryOffset( + instruction->GetIndex() % mirror::Class::kImtSize, kMipsPointerSize).Uint32Value(); } __ LoadFromOffset(kLoadWord, locations->Out().AsRegister(), diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc index 9f2664c0a..2e78884da 100644 --- 
a/compiler/optimizing/code_generator_mips64.cc +++ b/compiler/optimizing/code_generator_mips64.cc @@ -2932,6 +2932,8 @@ void LocationsBuilderMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) { void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invoke) { // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError. GpuRegister temp = invoke->GetLocations()->GetTemp(0).AsRegister(); + uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset( + invoke->GetImtIndex() % mirror::Class::kImtSize, kMips64PointerSize).Uint32Value(); Location receiver = invoke->GetLocations()->InAt(0); uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); Offset entry_point = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kMips64DoublewordSize); @@ -2948,10 +2950,6 @@ void InstructionCodeGeneratorMIPS64::VisitInvokeInterface(HInvokeInterface* invo __ LoadFromOffset(kLoadUnsignedWord, temp, receiver.AsRegister(), class_offset); } codegen_->MaybeRecordImplicitNullCheck(invoke); - __ LoadFromOffset(kLoadDoubleword, temp, temp, - mirror::Class::ImtPtrOffset(kMips64PointerSize).Uint32Value()); - uint32_t method_offset = static_cast(ImTable::OffsetOfElement( - invoke->GetImtIndex() % ImTable::kSize, kMips64PointerSize)); // temp = temp->GetImtEntryAt(method_offset); __ LoadFromOffset(kLoadDoubleword, temp, temp, method_offset); // T9 = temp->GetEntryPoint(); diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index be20f1f7c..126161953 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -2027,6 +2027,8 @@ void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) LocationSummary* locations = invoke->GetLocations(); Register temp = locations->GetTemp(0).AsRegister(); XmmRegister hidden_reg = locations->GetTemp(1).AsFpuRegister(); + uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset( + invoke->GetImtIndex() % mirror::Class::kImtSize, kX86PointerSize).Uint32Value(); Location receiver = locations->InAt(0); uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); @@ -2053,12 +2055,7 @@ void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke) // intact/accessible until the end of the marking phase (the // concurrent copying collector may not in the future). 
__ MaybeUnpoisonHeapReference(temp); - // temp = temp->GetAddressOfIMT() - __ movl(temp, - Address(temp, mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value())); // temp = temp->GetImtEntryAt(method_offset); - uint32_t method_offset = static_cast(ImTable::OffsetOfElement( - invoke->GetImtIndex() % ImTable::kSize, kX86PointerSize)); __ movl(temp, Address(temp, method_offset)); // call temp->GetEntryPoint(); __ call(Address(temp, @@ -4078,12 +4075,8 @@ void InstructionCodeGeneratorX86::VisitClassTableGet(HClassTableGet* instruction method_offset = mirror::Class::EmbeddedVTableEntryOffset( instruction->GetIndex(), kX86PointerSize).SizeValue(); } else { - __ movl(locations->InAt(0).AsRegister(), - Address(locations->InAt(0).AsRegister(), - mirror::Class::ImtPtrOffset(kX86PointerSize).Uint32Value())); - // temp = temp->GetImtEntryAt(method_offset); - method_offset = static_cast(ImTable::OffsetOfElement( - instruction->GetIndex() % ImTable::kSize, kX86PointerSize)); + method_offset = mirror::Class::EmbeddedImTableEntryOffset( + instruction->GetIndex() % mirror::Class::kImtSize, kX86PointerSize).Uint32Value(); } __ movl(locations->Out().AsRegister(), Address(locations->InAt(0).AsRegister(), method_offset)); diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index cac33cddb..5e30203b3 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -2257,6 +2257,8 @@ void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invo LocationSummary* locations = invoke->GetLocations(); CpuRegister temp = locations->GetTemp(0).AsRegister(); CpuRegister hidden_reg = locations->GetTemp(1).AsRegister(); + uint32_t method_offset = mirror::Class::EmbeddedImTableEntryOffset( + invoke->GetImtIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value(); Location receiver = locations->InAt(0); size_t class_offset = mirror::Object::ClassOffset().SizeValue(); @@ -2282,12 +2284,6 @@ void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invo // intact/accessible until the end of the marking phase (the // concurrent copying collector may not in the future). 
__ MaybeUnpoisonHeapReference(temp); - // temp = temp->GetAddressOfIMT() - __ movq(temp, - Address(temp, mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value())); - // temp = temp->GetImtEntryAt(method_offset); - uint32_t method_offset = static_cast(ImTable::OffsetOfElement( - invoke->GetImtIndex() % ImTable::kSize, kX86_64PointerSize)); // temp = temp->GetImtEntryAt(method_offset); __ movq(temp, Address(temp, method_offset)); // call temp->GetEntryPoint(); @@ -4011,11 +4007,8 @@ void InstructionCodeGeneratorX86_64::VisitClassTableGet(HClassTableGet* instruct method_offset = mirror::Class::EmbeddedVTableEntryOffset( instruction->GetIndex(), kX86_64PointerSize).SizeValue(); } else { - __ movq(locations->Out().AsRegister(), - Address(locations->InAt(0).AsRegister(), - mirror::Class::ImtPtrOffset(kX86_64PointerSize).Uint32Value())); - method_offset = static_cast(ImTable::OffsetOfElement( - instruction->GetIndex() % ImTable::kSize, kX86_64PointerSize)); + method_offset = mirror::Class::EmbeddedImTableEntryOffset( + instruction->GetIndex() % mirror::Class::kImtSize, kX86_64PointerSize).Uint32Value(); } __ movq(locations->Out().AsRegister(), Address(locations->InAt(0).AsRegister(), method_offset)); diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 8f2db3d1d..c67b2d5fe 100644 --- a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -656,8 +656,8 @@ bool HInliner::TryInlinePolymorphicCallToSameTarget(HInvoke* invoke_instruction, } ArtMethod* new_method = nullptr; if (invoke_instruction->IsInvokeInterface()) { - new_method = ic.GetTypeAt(i)->GetImt(pointer_size)->Get( - method_index % ImTable::kSize, pointer_size); + new_method = ic.GetTypeAt(i)->GetEmbeddedImTableEntry( + method_index % mirror::Class::kImtSize, pointer_size); if (new_method->IsRuntimeMethod()) { // Bail out as soon as we see a conflict trampoline in one of the target's // interface table. diff --git a/patchoat/patchoat.cc b/patchoat/patchoat.cc index 5bb61bb82..0a7ffda3b 100644 --- a/patchoat/patchoat.cc +++ b/patchoat/patchoat.cc @@ -494,17 +494,6 @@ void PatchOat::PatchArtMethods(const ImageHeader* image_header) { image_header->VisitPackedArtMethods(&visitor, heap_->Begin(), pointer_size); } -void PatchOat::PatchImTables(const ImageHeader* image_header) { - const size_t pointer_size = InstructionSetPointerSize(isa_); - // We can safely walk target image since the conflict tables are independent. - image_header->VisitPackedImTables( - [this](ArtMethod* method) { - return RelocatedAddressOfPointer(method); - }, - image_->Begin(), - pointer_size); -} - void PatchOat::PatchImtConflictTables(const ImageHeader* image_header) { const size_t pointer_size = InstructionSetPointerSize(isa_); // We can safely walk target image since the conflict tables are independent. 
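For illustration only (not part of this patch): PatchImtConflictTables above walks the packed conflict-table section and rewrites every stored ArtMethod* through a relocation visitor. A minimal stand-alone sketch of that idea, with a stand-in Method type, a flat row layout of {interface, implementation} pairs, and an assumed fixed relocation delta:

    // Simplified model of relocating a packed conflict table: every method
    // pointer in the section is shifted by the same image-relocation delta.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    struct Method {};  // Stand-in for ArtMethod.

    int main() {
      // Packed rows of {interface method, implementation method}, as in ImtConflictTable.
      std::vector<Method*> table = {
          reinterpret_cast<Method*>(0x1000), reinterpret_cast<Method*>(0x2000),
          reinterpret_cast<Method*>(0x1008), reinterpret_cast<Method*>(0x2008),
      };
      const std::ptrdiff_t delta = 0x100000;  // Image mapped at a different address.

      // The "visitor": rewrite every stored pointer to its relocated address.
      for (Method*& entry : table) {
        entry = reinterpret_cast<Method*>(reinterpret_cast<uintptr_t>(entry) + delta);
      }
      std::printf("first implementation now at %p\n", static_cast<void*>(table[1]));
      return 0;
    }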
@@ -647,7 +636,6 @@ bool PatchOat::PatchImage(bool primary_image) { PatchArtFields(image_header); PatchArtMethods(image_header); - PatchImTables(image_header); PatchImtConflictTables(image_header); PatchInternedStrings(image_header); PatchClassTable(image_header); diff --git a/patchoat/patchoat.h b/patchoat/patchoat.h index 61ec695d8..3ef837fde 100644 --- a/patchoat/patchoat.h +++ b/patchoat/patchoat.h @@ -117,7 +117,6 @@ class PatchOat { bool PatchImage(bool primary_image) SHARED_REQUIRES(Locks::mutator_lock_); void PatchArtFields(const ImageHeader* image_header) SHARED_REQUIRES(Locks::mutator_lock_); void PatchArtMethods(const ImageHeader* image_header) SHARED_REQUIRES(Locks::mutator_lock_); - void PatchImTables(const ImageHeader* image_header) SHARED_REQUIRES(Locks::mutator_lock_); void PatchImtConflictTables(const ImageHeader* image_header) SHARED_REQUIRES(Locks::mutator_lock_); void PatchInternedStrings(const ImageHeader* image_header) diff --git a/runtime/art_method.h b/runtime/art_method.h index 90b2406a1..2b025f8c6 100644 --- a/runtime/art_method.h +++ b/runtime/art_method.h @@ -99,22 +99,6 @@ class ImtConflictTable { return GetMethod(index * kMethodCount + kMethodImplementation, pointer_size); } - // Return true if two conflict tables are the same. - bool Equals(ImtConflictTable* other, size_t pointer_size) const { - size_t num = NumEntries(pointer_size); - if (num != other->NumEntries(pointer_size)) { - return false; - } - for (size_t i = 0; i < num; ++i) { - if (GetInterfaceMethod(i, pointer_size) != other->GetInterfaceMethod(i, pointer_size) || - GetImplementationMethod(i, pointer_size) != - other->GetImplementationMethod(i, pointer_size)) { - return false; - } - } - return true; - } - // Visit all of the entries. // NO_THREAD_SAFETY_ANALYSIS for calling with held locks. Visitor is passed a pair of ArtMethod* // and also returns one. The order is . diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc index cb34d8a12..fe7448fa2 100644 --- a/runtime/class_linker.cc +++ b/runtime/class_linker.cc @@ -857,13 +857,11 @@ static void SanityCheckObjectsCallback(mirror::Object* obj, void* arg ATTRIBUTE_ if (vtable != nullptr) { SanityCheckArtMethodPointerArray(vtable, nullptr, pointer_size, image_spaces); } - if (klass->ShouldHaveImt()) { - ImTable* imt = klass->GetImt(pointer_size); - for (size_t i = 0; i < ImTable::kSize; ++i) { - SanityCheckArtMethod(imt->Get(i, pointer_size), nullptr, image_spaces); + if (klass->ShouldHaveEmbeddedImtAndVTable()) { + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { + SanityCheckArtMethod( + klass->GetEmbeddedImTableEntry(i, pointer_size), nullptr, image_spaces); } - } - if (klass->ShouldHaveEmbeddedVTable()) { for (int32_t i = 0; i < klass->GetEmbeddedVTableLength(); ++i) { SanityCheckArtMethod(klass->GetEmbeddedVTableEntry(i, pointer_size), nullptr, image_spaces); } @@ -3458,11 +3456,16 @@ mirror::Class* ClassLinker::CreateArrayClass(Thread* self, const char* descripto new_class->SetClassFlags(mirror::kClassFlagObjectArray); } mirror::Class::SetStatus(new_class, mirror::Class::kStatusLoaded, self); - new_class->PopulateEmbeddedVTable(image_pointer_size_); + { + ArtMethod* imt[mirror::Class::kImtSize]; + std::fill_n(imt, arraysize(imt), Runtime::Current()->GetImtUnimplementedMethod()); + new_class->PopulateEmbeddedImtAndVTable(imt, image_pointer_size_); + } mirror::Class::SetStatus(new_class, mirror::Class::kStatusInitialized, self); // don't need to set new_class->SetObjectSize(..) 
// because Object::SizeOf delegates to Array::SizeOf + // All arrays have java/lang/Cloneable and java/io/Serializable as // interfaces. We need to set that up here, so that stuff like // "instanceof" works right. @@ -5033,11 +5036,9 @@ bool ClassLinker::LinkClass(Thread* self, if (!LinkSuperClass(klass)) { return false; } - ArtMethod* imt_data[ImTable::kSize]; - // If there are any new conflicts compared to super class. - bool new_conflict = false; - std::fill_n(imt_data, arraysize(imt_data), Runtime::Current()->GetImtUnimplementedMethod()); - if (!LinkMethods(self, klass, interfaces, &new_conflict, imt_data)) { + ArtMethod* imt[mirror::Class::kImtSize]; + std::fill_n(imt, arraysize(imt), Runtime::Current()->GetImtUnimplementedMethod()); + if (!LinkMethods(self, klass, interfaces, imt)) { return false; } if (!LinkInstanceFields(self, klass)) { @@ -5050,45 +5051,15 @@ bool ClassLinker::LinkClass(Thread* self, CreateReferenceInstanceOffsets(klass); CHECK_EQ(mirror::Class::kStatusLoaded, klass->GetStatus()); - ImTable* imt = nullptr; - if (klass->ShouldHaveImt()) { - // If there are any new conflicts compared to the super class we can not make a copy. There - // can be cases where both will have a conflict method at the same slot without having the same - // set of conflicts. In this case, we can not share the IMT since the conflict table slow path - // will possibly create a table that is incorrect for either of the classes. - // Same IMT with new_conflict does not happen very often. - if (!new_conflict && klass->HasSuperClass() && klass->GetSuperClass()->ShouldHaveImt()) { - ImTable* super_imt = klass->GetSuperClass()->GetImt(image_pointer_size_); - bool imt_equals = true; - for (size_t i = 0; i < ImTable::kSize && imt_equals; ++i) { - imt_equals = imt_equals && (super_imt->Get(i, image_pointer_size_) == imt_data[i]); - } - if (imt_equals) { - imt = super_imt; - } - } - if (imt == nullptr) { - LinearAlloc* allocator = GetAllocatorForClassLoader(klass->GetClassLoader()); - imt = reinterpret_cast( - allocator->Alloc(self, ImTable::SizeInBytes(image_pointer_size_))); - if (imt == nullptr) { - return false; - } - imt->Populate(imt_data, image_pointer_size_); - } - } - if (!klass->IsTemp() || (!init_done_ && klass->GetClassSize() == class_size)) { // We don't need to retire this class as it has no embedded tables or it was created the // correct size during class linker initialization. CHECK_EQ(klass->GetClassSize(), class_size) << PrettyDescriptor(klass.Get()); - if (klass->ShouldHaveEmbeddedVTable()) { - klass->PopulateEmbeddedVTable(image_pointer_size_); - } - if (klass->ShouldHaveImt()) { - klass->SetImt(imt, image_pointer_size_); + if (klass->ShouldHaveEmbeddedImtAndVTable()) { + klass->PopulateEmbeddedImtAndVTable(imt, image_pointer_size_); } + // This will notify waiters on klass that saw the not yet resolved // class in the class_table_ during EnsureResolved. mirror::Class::SetStatus(klass, mirror::Class::kStatusResolved, self); @@ -5480,7 +5451,6 @@ bool ClassLinker::LinkSuperClass(Handle klass) { bool ClassLinker::LinkMethods(Thread* self, Handle klass, Handle> interfaces, - bool* out_new_conflict, ArtMethod** out_imt) { self->AllowThreadSuspension(); // A map from vtable indexes to the method they need to be updated to point to. Used because we @@ -5492,7 +5462,7 @@ bool ClassLinker::LinkMethods(Thread* self, // any vtable entries with new default method implementations. 
return SetupInterfaceLookupTable(self, klass, interfaces) && LinkVirtualMethods(self, klass, /*out*/ &default_translations) - && LinkInterfaceMethods(self, klass, default_translations, out_new_conflict, out_imt); + && LinkInterfaceMethods(self, klass, default_translations, out_imt); } // Comparator for name and signature of a method, used in finding overriding methods. Implementation @@ -5650,7 +5620,7 @@ bool ClassLinker::LinkVirtualMethods( StackHandleScope<2> hs(self); Handle super_class(hs.NewHandle(klass->GetSuperClass())); MutableHandle vtable; - if (super_class->ShouldHaveEmbeddedVTable()) { + if (super_class->ShouldHaveEmbeddedImtAndVTable()) { vtable = hs.NewHandle(AllocPointerArray(self, max_count)); if (UNLIKELY(vtable.Get() == nullptr)) { self->AssertPendingOOMException(); @@ -6050,7 +6020,6 @@ ArtMethod* ClassLinker::AddMethodToConflictTable(mirror::Class* klass, void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method, ArtMethod* imt_conflict_method, ArtMethod* current_method, - /*out*/bool* new_conflict, /*out*/ArtMethod** imt_ref) { // Place method in imt if entry is empty, place conflict otherwise. if (*imt_ref == unimplemented_method) { @@ -6067,82 +6036,40 @@ void ClassLinker::SetIMTRef(ArtMethod* unimplemented_method, *imt_ref = current_method; } else { *imt_ref = imt_conflict_method; - *new_conflict = true; } } else { // Place the default conflict method. Note that there may be an existing conflict // method in the IMT, but it could be one tailored to the super class, with a // specific ImtConflictTable. *imt_ref = imt_conflict_method; - *new_conflict = true; } } void ClassLinker::FillIMTAndConflictTables(mirror::Class* klass) { - DCHECK(klass->ShouldHaveImt()) << PrettyClass(klass); + DCHECK(klass->ShouldHaveEmbeddedImtAndVTable()) << PrettyClass(klass); DCHECK(!klass->IsTemp()) << PrettyClass(klass); - ArtMethod* imt_data[ImTable::kSize]; + ArtMethod* imt[mirror::Class::kImtSize]; Runtime* const runtime = Runtime::Current(); ArtMethod* const unimplemented_method = runtime->GetImtUnimplementedMethod(); ArtMethod* const conflict_method = runtime->GetImtConflictMethod(); - std::fill_n(imt_data, arraysize(imt_data), unimplemented_method); + std::fill_n(imt, arraysize(imt), unimplemented_method); if (klass->GetIfTable() != nullptr) { - bool new_conflict = false; FillIMTFromIfTable(klass->GetIfTable(), unimplemented_method, conflict_method, klass, - /*create_conflict_tables*/true, - /*ignore_copied_methods*/false, - &new_conflict, - &imt_data[0]); + true, + false, + &imt[0]); } - if (!klass->ShouldHaveImt()) { - return; - } - // Compare the IMT with the super class including the conflict methods. If they are equivalent, - // we can just use the same pointer. - ImTable* imt = nullptr; - mirror::Class* super_class = klass->GetSuperClass(); - if (super_class != nullptr && super_class->ShouldHaveImt()) { - ImTable* super_imt = super_class->GetImt(image_pointer_size_); - bool same = true; - for (size_t i = 0; same && i < ImTable::kSize; ++i) { - ArtMethod* method = imt_data[i]; - ArtMethod* super_method = super_imt->Get(i, image_pointer_size_); - if (method != super_method) { - bool is_conflict_table = method->IsRuntimeMethod() && - method != unimplemented_method && - method != conflict_method; - // Verify conflict contents. 
- bool super_conflict_table = super_method->IsRuntimeMethod() && - super_method != unimplemented_method && - super_method != conflict_method; - if (!is_conflict_table || !super_conflict_table) { - same = false; - } else { - ImtConflictTable* table1 = method->GetImtConflictTable(image_pointer_size_); - ImtConflictTable* table2 = super_method->GetImtConflictTable(image_pointer_size_); - same = same && table1->Equals(table2, image_pointer_size_); - } - } - } - if (same) { - imt = super_imt; - } - } - if (imt == nullptr) { - imt = klass->GetImt(image_pointer_size_); - DCHECK(imt != nullptr); - imt->Populate(imt_data, image_pointer_size_); - } else { - klass->SetImt(imt, image_pointer_size_); + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { + klass->SetEmbeddedImTableEntry(i, imt[i], image_pointer_size_); } } static inline uint32_t GetIMTIndex(ArtMethod* interface_method) SHARED_REQUIRES(Locks::mutator_lock_) { - return interface_method->GetDexMethodIndex() % ImTable::kSize; + return interface_method->GetDexMethodIndex() % mirror::Class::kImtSize; } ImtConflictTable* ClassLinker::CreateImtConflictTable(size_t count, @@ -6164,9 +6091,8 @@ void ClassLinker::FillIMTFromIfTable(mirror::IfTable* if_table, mirror::Class* klass, bool create_conflict_tables, bool ignore_copied_methods, - /*out*/bool* new_conflict, - /*out*/ArtMethod** imt) { - uint32_t conflict_counts[ImTable::kSize] = {}; + ArtMethod** imt) { + uint32_t conflict_counts[mirror::Class::kImtSize] = {}; for (size_t i = 0, length = if_table->Count(); i < length; ++i) { mirror::Class* interface = if_table->GetInterface(i); const size_t num_virtuals = interface->NumVirtualMethods(); @@ -6208,7 +6134,6 @@ void ClassLinker::FillIMTFromIfTable(mirror::IfTable* if_table, SetIMTRef(unimplemented_method, imt_conflict_method, implementation_method, - /*out*/new_conflict, /*out*/&imt[imt_index]); } } @@ -6216,7 +6141,7 @@ void ClassLinker::FillIMTFromIfTable(mirror::IfTable* if_table, if (create_conflict_tables) { // Create the conflict tables. LinearAlloc* linear_alloc = GetAllocatorForClassLoader(klass->GetClassLoader()); - for (size_t i = 0; i < ImTable::kSize; ++i) { + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { size_t conflicts = conflict_counts[i]; if (imt[i] == imt_conflict_method) { ImtConflictTable* new_table = CreateImtConflictTable(conflicts, linear_alloc); @@ -6503,14 +6428,12 @@ static void SanityCheckVTable(Handle klass, uint32_t pointer_size void ClassLinker::FillImtFromSuperClass(Handle klass, ArtMethod* unimplemented_method, ArtMethod* imt_conflict_method, - bool* new_conflict, ArtMethod** imt) { DCHECK(klass->HasSuperClass()); mirror::Class* super_class = klass->GetSuperClass(); - if (super_class->ShouldHaveImt()) { - ImTable* super_imt = super_class->GetImt(image_pointer_size_); - for (size_t i = 0; i < ImTable::kSize; ++i) { - imt[i] = super_imt->Get(i, image_pointer_size_); + if (super_class->ShouldHaveEmbeddedImtAndVTable()) { + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { + imt[i] = super_class->GetEmbeddedImTableEntry(i, image_pointer_size_); } } else { // No imt in the super class, need to reconstruct from the iftable. 
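For illustration only (not part of this patch): the SetIMTRef logic in this file hashes an interface method into the IMT by dex method index modulo the table size, claims an empty slot, and downgrades a doubly-claimed slot to the conflict trampoline. A minimal stand-alone sketch of that slot-assignment rule, with Method as a stand-in for ArtMethod and kImtSize assumed to be 64 (the real value comes from the IMT_SIZE build macro):

    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    struct Method { const char* name; };

    constexpr size_t kImtSize = 64;         // assumed IMT_SIZE
    Method unimplemented{"unimplemented"};  // stand-in for GetImtUnimplementedMethod()
    Method conflict{"conflict"};            // stand-in for GetImtConflictMethod()

    // Interface methods hash into the IMT by dex method index modulo the table size.
    size_t GetImtIndex(uint32_t dex_method_index) { return dex_method_index % kImtSize; }

    // Place a method in an IMT slot, or mark the slot as conflicting (simplified SetIMTRef).
    void SetImtRef(Method* current, Method** slot) {
      if (*slot == &unimplemented) {
        *slot = current;    // First implementation claims the slot.
      } else if (*slot != current) {
        *slot = &conflict;  // Two different targets hash to the same slot.
      }
    }

    int main() {
      std::array<Method*, kImtSize> imt;
      imt.fill(&unimplemented);
      Method run{"Runnable.run impl"}, close{"Closeable.close impl"};
      SetImtRef(&run, &imt[GetImtIndex(5)]);
      SetImtRef(&close, &imt[GetImtIndex(5 + kImtSize)]);  // Same slot: becomes a conflict.
      std::printf("slot %zu -> %s\n", GetImtIndex(5), imt[GetImtIndex(5)]->name);
      return 0;
    }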
@@ -6523,7 +6446,6 @@ void ClassLinker::FillImtFromSuperClass(Handle klass, klass.Get(), /*create_conflict_table*/false, /*ignore_copied_methods*/true, - /*out*/new_conflict, /*out*/imt); } } @@ -6534,7 +6456,6 @@ bool ClassLinker::LinkInterfaceMethods( Thread* self, Handle klass, const std::unordered_map& default_translations, - bool* out_new_conflict, ArtMethod** out_imt) { StackHandleScope<3> hs(self); Runtime* const runtime = Runtime::Current(); @@ -6570,7 +6491,6 @@ bool ClassLinker::LinkInterfaceMethods( FillImtFromSuperClass(klass, unimplemented_method, imt_conflict_method, - out_new_conflict, out_imt); } // Allocate method arrays before since we don't want miss visiting miranda method roots due to @@ -6702,7 +6622,6 @@ bool ClassLinker::LinkInterfaceMethods( SetIMTRef(unimplemented_method, imt_conflict_method, vtable_method, - /*out*/out_new_conflict, /*out*/imt_ptr); } break; @@ -6845,7 +6764,6 @@ bool ClassLinker::LinkInterfaceMethods( SetIMTRef(unimplemented_method, imt_conflict_method, current_method, - /*out*/out_new_conflict, /*out*/imt_ptr); } } @@ -7045,7 +6963,7 @@ bool ClassLinker::LinkInterfaceMethods( } // Fix up IMT next - for (size_t i = 0; i < ImTable::kSize; ++i) { + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { auto it = move_table.find(out_imt[i]); if (it != move_table.end()) { out_imt[i] = it->second; diff --git a/runtime/class_linker.h b/runtime/class_linker.h index d6822c522..ca5af1997 100644 --- a/runtime/class_linker.h +++ b/runtime/class_linker.h @@ -833,7 +833,6 @@ class ClassLinker { bool LinkMethods(Thread* self, Handle klass, Handle> interfaces, - bool* out_new_conflict, ArtMethod** out_imt) SHARED_REQUIRES(Locks::mutator_lock_); @@ -969,20 +968,19 @@ class ClassLinker { // * kDefaultConflict - Conflicting method implementations were found when searching for // target_method. The value of *out_default_method is null. DefaultMethodSearchResult FindDefaultMethodImplementation( - Thread* self, - ArtMethod* target_method, - Handle klass, - /*out*/ArtMethod** out_default_method) const + Thread* self, + ArtMethod* target_method, + Handle klass, + /*out*/ArtMethod** out_default_method) const SHARED_REQUIRES(Locks::mutator_lock_); // Sets the imt entries and fixes up the vtable for the given class by linking all the interface // methods. See LinkVirtualMethods for an explanation of what default_translations is. 
bool LinkInterfaceMethods( - Thread* self, - Handle klass, - const std::unordered_map& default_translations, - bool* out_new_conflict, - ArtMethod** out_imt) + Thread* self, + Handle klass, + const std::unordered_map& default_translations, + ArtMethod** out_imt) SHARED_REQUIRES(Locks::mutator_lock_); bool LinkStaticFields(Thread* self, Handle klass, size_t* class_size) @@ -1098,7 +1096,6 @@ class ClassLinker { void SetIMTRef(ArtMethod* unimplemented_method, ArtMethod* imt_conflict_method, ArtMethod* current_method, - /*out*/bool* new_conflict, /*out*/ArtMethod** imt_ref) SHARED_REQUIRES(Locks::mutator_lock_); void FillIMTFromIfTable(mirror::IfTable* if_table, @@ -1107,13 +1104,11 @@ class ClassLinker { mirror::Class* klass, bool create_conflict_tables, bool ignore_copied_methods, - /*out*/bool* new_conflict, - /*out*/ArtMethod** imt) SHARED_REQUIRES(Locks::mutator_lock_); + ArtMethod** imt) SHARED_REQUIRES(Locks::mutator_lock_); void FillImtFromSuperClass(Handle klass, ArtMethod* unimplemented_method, ArtMethod* imt_conflict_method, - bool* new_conflict, ArtMethod** imt) SHARED_REQUIRES(Locks::mutator_lock_); std::vector boot_class_path_; diff --git a/runtime/class_linker_test.cc b/runtime/class_linker_test.cc index 9b59f2bba..488826b6c 100644 --- a/runtime/class_linker_test.cc +++ b/runtime/class_linker_test.cc @@ -148,8 +148,7 @@ class ClassLinkerTest : public CommonRuntimeTest { EXPECT_EQ(0U, array->NumInstanceFields()); EXPECT_EQ(0U, array->NumStaticFields()); EXPECT_EQ(2U, array->NumDirectInterfaces()); - EXPECT_FALSE(array->ShouldHaveImt()); - EXPECT_TRUE(array->ShouldHaveEmbeddedVTable()); + EXPECT_TRUE(array->ShouldHaveEmbeddedImtAndVTable()); EXPECT_EQ(2, array->GetIfTableCount()); ASSERT_TRUE(array->GetIfTable() != nullptr); mirror::Class* direct_interface0 = mirror::Class::GetDirectInterface(self, array, 0); diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h index 916ca2931..fc6257302 100644 --- a/runtime/entrypoints/entrypoint_utils-inl.h +++ b/runtime/entrypoints/entrypoint_utils-inl.h @@ -559,10 +559,9 @@ inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_ } } case kInterface: { - uint32_t imt_index = resolved_method->GetDexMethodIndex() % ImTable::kSize; - size_t pointer_size = class_linker->GetImagePointerSize(); - ArtMethod* imt_method = (*this_object)->GetClass()->GetImt(pointer_size)-> - Get(imt_index, pointer_size); + uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize; + ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry( + imt_index, class_linker->GetImagePointerSize()); if (!imt_method->IsRuntimeMethod()) { if (kIsDebugBuild) { mirror::Class* klass = (*this_object)->GetClass(); diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc index 7175d5436..03771aa80 100644 --- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc +++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc @@ -2169,13 +2169,13 @@ extern "C" TwoWordReturn artInvokeInterfaceTrampoline(uint32_t deadbeef ATTRIBUT dex_method_idx, sizeof(void*)); DCHECK(interface_method != nullptr) << dex_method_idx << " " << PrettyMethod(caller_method); ArtMethod* method = nullptr; - ImTable* imt = cls->GetImt(sizeof(void*)); if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) { // If the dex cache already resolved the interface method, look whether we have // a match in 
the ImtConflictTable. uint32_t imt_index = interface_method->GetDexMethodIndex(); - ArtMethod* conflict_method = imt->Get(imt_index % ImTable::kSize, sizeof(void*)); + ArtMethod* conflict_method = cls->GetEmbeddedImTableEntry( + imt_index % mirror::Class::kImtSize, sizeof(void*)); if (LIKELY(conflict_method->IsRuntimeMethod())) { ImtConflictTable* current_table = conflict_method->GetImtConflictTable(sizeof(void*)); DCHECK(current_table != nullptr); @@ -2227,7 +2227,8 @@ extern "C" TwoWordReturn artInvokeInterfaceTrampoline(uint32_t deadbeef ATTRIBUT // We arrive here if we have found an implementation, and it is not in the ImtConflictTable. // We create a new table with the new pair { interface_method, method }. uint32_t imt_index = interface_method->GetDexMethodIndex(); - ArtMethod* conflict_method = imt->Get(imt_index % ImTable::kSize, sizeof(void*)); + ArtMethod* conflict_method = cls->GetEmbeddedImTableEntry( + imt_index % mirror::Class::kImtSize, sizeof(void*)); if (conflict_method->IsRuntimeMethod()) { ArtMethod* new_conflict_method = Runtime::Current()->GetClassLinker()->AddMethodToConflictTable( cls.Get(), @@ -2238,9 +2239,9 @@ extern "C" TwoWordReturn artInvokeInterfaceTrampoline(uint32_t deadbeef ATTRIBUT if (new_conflict_method != conflict_method) { // Update the IMT if we create a new conflict method. No fence needed here, as the // data is consistent. - imt->Set(imt_index % ImTable::kSize, - new_conflict_method, - sizeof(void*)); + cls->SetEmbeddedImTableEntry(imt_index % mirror::Class::kImtSize, + new_conflict_method, + sizeof(void*)); } } diff --git a/runtime/gc/space/image_space.cc b/runtime/gc/space/image_space.cc index 8cadc2e0f..e896c7a72 100644 --- a/runtime/gc/space/image_space.cc +++ b/runtime/gc/space/image_space.cc @@ -1130,10 +1130,6 @@ static bool RelocateInPlace(ImageHeader& image_header, image_header.VisitPackedArtFields(&field_visitor, target_base); } { - TimingLogger::ScopedTiming timing("Fixup imt", &logger); - image_header.VisitPackedImTables(fixup_adapter, target_base, pointer_size); - } - { TimingLogger::ScopedTiming timing("Fixup conflict tables", &logger); image_header.VisitPackedImtConflictTables(fixup_adapter, target_base, pointer_size); } diff --git a/runtime/image-inl.h b/runtime/image-inl.h index cd0557a23..ea75a622c 100644 --- a/runtime/image-inl.h +++ b/runtime/image-inl.h @@ -20,7 +20,6 @@ #include "image.h" #include "art_method.h" -#include "imtable.h" namespace art { @@ -46,24 +45,6 @@ inline mirror::ObjectArray* ImageHeader::GetImageRoots() const { } template -inline void ImageHeader::VisitPackedImTables(const Visitor& visitor, - uint8_t* base, - size_t pointer_size) const { - const ImageSection& section = GetImageSection(kSectionImTables); - for (size_t pos = 0; pos < section.Size();) { - ImTable* imt = reinterpret_cast(base + section.Offset() + pos); - for (size_t i = 0; i < ImTable::kSize; ++i) { - ArtMethod* orig = imt->Get(i, pointer_size); - ArtMethod* updated = visitor(orig); - if (updated != orig) { - imt->Set(i, updated, pointer_size); - } - } - pos += ImTable::SizeInBytes(pointer_size); - } -} - -template inline void ImageHeader::VisitPackedImtConflictTables(const Visitor& visitor, uint8_t* base, size_t pointer_size) const { diff --git a/runtime/image.cc b/runtime/image.cc index 2362a92c2..a9552c27d 100644 --- a/runtime/image.cc +++ b/runtime/image.cc @@ -24,7 +24,7 @@ namespace art { const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' }; -const uint8_t ImageHeader::kImageVersion[] = { '0', '3', '0', '\0' }; +const 
uint8_t ImageHeader::kImageVersion[] = { '0', '2', '9', '\0' }; ImageHeader::ImageHeader(uint32_t image_begin, uint32_t image_size, diff --git a/runtime/image.h b/runtime/image.h index 06f06eed0..2ea9af772 100644 --- a/runtime/image.h +++ b/runtime/image.h @@ -195,7 +195,6 @@ class PACKED(4) ImageHeader { kSectionArtFields, kSectionArtMethods, kSectionRuntimeMethods, - kSectionImTables, kSectionIMTConflictTables, kSectionDexCacheArrays, kSectionInternedStrings, @@ -280,11 +279,6 @@ class PACKED(4) ImageHeader { void VisitPackedArtFields(ArtFieldVisitor* visitor, uint8_t* base) const; template - void VisitPackedImTables(const Visitor& visitor, - uint8_t* base, - size_t pointer_size) const; - - template void VisitPackedImtConflictTables(const Visitor& visitor, uint8_t* base, size_t pointer_size) const; diff --git a/runtime/imtable.h b/runtime/imtable.h deleted file mode 100644 index 51faf70d1..000000000 --- a/runtime/imtable.h +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright (C) 2016 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ART_RUNTIME_IMTABLE_H_ -#define ART_RUNTIME_IMTABLE_H_ - -#ifndef IMT_SIZE -#error IMT_SIZE not defined -#endif - -namespace art { - -class ArtMethod; - -class ImTable { - public: - // Interface method table size. Increasing this value reduces the chance of two interface methods - // colliding in the interface method table but increases the size of classes that implement - // (non-marker) interfaces. - static constexpr size_t kSize = IMT_SIZE; - - ArtMethod* Get(size_t index, size_t pointer_size) { - DCHECK_LT(index, kSize); - uint8_t* ptr = reinterpret_cast(this) + OffsetOfElement(index, pointer_size); - if (pointer_size == 4) { - uint32_t value = *reinterpret_cast(ptr); - return reinterpret_cast(value); - } else { - uint64_t value = *reinterpret_cast(ptr); - return reinterpret_cast(value); - } - } - - void Set(size_t index, ArtMethod* method, size_t pointer_size) { - DCHECK_LT(index, kSize); - uint8_t* ptr = reinterpret_cast(this) + OffsetOfElement(index, pointer_size); - if (pointer_size == 4) { - uintptr_t value = reinterpret_cast(method); - DCHECK_EQ(static_cast(value), value); // Check that we dont lose any non 0 bits. 
- *reinterpret_cast(ptr) = static_cast(value); - } else { - *reinterpret_cast(ptr) = reinterpret_cast(method); - } - } - - static size_t OffsetOfElement(size_t index, size_t pointer_size) { - return index * pointer_size; - } - - void Populate(ArtMethod** data, size_t pointer_size) { - for (size_t i = 0; i < kSize; ++i) { - Set(i, data[i], pointer_size); - } - } - - constexpr static size_t SizeInBytes(size_t pointer_size) { - return kSize * pointer_size; - } -}; - -} // namespace art - -#endif // ART_RUNTIME_IMTABLE_H_ - diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h index 3750b7ad1..cc470f372 100644 --- a/runtime/interpreter/interpreter_common.h +++ b/runtime/interpreter/interpreter_common.h @@ -679,7 +679,7 @@ static inline bool DoInvokeVirtualQuick(Thread* self, ShadowFrame& shadow_frame, return false; } const uint32_t vtable_idx = (is_range) ? inst->VRegB_3rc() : inst->VRegB_35c(); - CHECK(receiver->GetClass()->ShouldHaveEmbeddedVTable()); + CHECK(receiver->GetClass()->ShouldHaveEmbeddedImtAndVTable()); ArtMethod* const called_method = receiver->GetClass()->GetEmbeddedVTableEntry( vtable_idx, sizeof(void*)); if (UNLIKELY(called_method == nullptr)) { diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h index b783a019e..cefd9f031 100644 --- a/runtime/mirror/class-inl.h +++ b/runtime/mirror/class-inl.h @@ -247,19 +247,38 @@ inline void Class::SetVTable(PointerArray* new_vtable) { SetFieldObject(OFFSET_OF_OBJECT_MEMBER(Class, vtable_), new_vtable); } +inline MemberOffset Class::EmbeddedImTableEntryOffset(uint32_t i, size_t pointer_size) { + DCHECK_LT(i, kImtSize); + return MemberOffset( + EmbeddedImTableOffset(pointer_size).Uint32Value() + i * ImTableEntrySize(pointer_size)); +} + +template +inline ArtMethod* Class::GetEmbeddedImTableEntry(uint32_t i, size_t pointer_size) { + DCHECK((ShouldHaveEmbeddedImtAndVTable())); + return GetFieldPtrWithSize( + EmbeddedImTableEntryOffset(i, pointer_size), pointer_size); +} + +template +inline void Class::SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size) { + DCHECK((ShouldHaveEmbeddedImtAndVTable())); + SetFieldPtrWithSize(EmbeddedImTableEntryOffset(i, pointer_size), method, pointer_size); +} + inline bool Class::HasVTable() { - return GetVTable() != nullptr || ShouldHaveEmbeddedVTable(); + return GetVTable() != nullptr || ShouldHaveEmbeddedImtAndVTable(); } inline int32_t Class::GetVTableLength() { - if (ShouldHaveEmbeddedVTable()) { + if (ShouldHaveEmbeddedImtAndVTable()) { return GetEmbeddedVTableLength(); } return GetVTable() != nullptr ? 
GetVTable()->GetLength() : 0; } inline ArtMethod* Class::GetVTableEntry(uint32_t i, size_t pointer_size) { - if (ShouldHaveEmbeddedVTable()) { + if (ShouldHaveEmbeddedImtAndVTable()) { return GetEmbeddedVTableEntry(i, pointer_size); } auto* vtable = GetVTable(); @@ -275,14 +294,6 @@ inline void Class::SetEmbeddedVTableLength(int32_t len) { SetField32(MemberOffset(EmbeddedVTableLengthOffset()), len); } -inline ImTable* Class::GetImt(size_t pointer_size) { - return GetFieldPtrWithSize(MemberOffset(ImtPtrOffset(pointer_size)), pointer_size); -} - -inline void Class::SetImt(ImTable* imt, size_t pointer_size) { - return SetFieldPtrWithSize(MemberOffset(ImtPtrOffset(pointer_size)), imt, pointer_size); -} - inline MemberOffset Class::EmbeddedVTableEntryOffset(uint32_t i, size_t pointer_size) { return MemberOffset( EmbeddedVTableOffset(pointer_size).Uint32Value() + i * VTableEntrySize(pointer_size)); @@ -530,7 +541,7 @@ template inline MemberOffset Class::GetFirstReferenceStaticFieldOffset(size_t pointer_size) { DCHECK(IsResolved()); uint32_t base = sizeof(mirror::Class); // Static fields come after the class. - if (ShouldHaveEmbeddedVTable()) { + if (ShouldHaveEmbeddedImtAndVTable()) { // Static fields come after the embedded tables. base = mirror::Class::ComputeClassSize( true, GetEmbeddedVTableLength(), 0, 0, 0, 0, 0, pointer_size); @@ -541,7 +552,7 @@ inline MemberOffset Class::GetFirstReferenceStaticFieldOffset(size_t pointer_siz inline MemberOffset Class::GetFirstReferenceStaticFieldOffsetDuringLinking(size_t pointer_size) { DCHECK(IsLoaded()); uint32_t base = sizeof(mirror::Class); // Static fields come after the class. - if (ShouldHaveEmbeddedVTable()) { + if (ShouldHaveEmbeddedImtAndVTable()) { // Static fields come after the embedded tables. base = mirror::Class::ComputeClassSize(true, GetVTableDuringLinking()->GetLength(), 0, 0, 0, 0, 0, pointer_size); @@ -700,7 +711,7 @@ inline Object* Class::AllocNonMovableObject(Thread* self) { return Alloc(self, Runtime::Current()->GetHeap()->GetCurrentNonMovingAllocator()); } -inline uint32_t Class::ComputeClassSize(bool has_embedded_vtable, +inline uint32_t Class::ComputeClassSize(bool has_embedded_tables, uint32_t num_vtable_entries, uint32_t num_8bit_static_fields, uint32_t num_16bit_static_fields, @@ -711,10 +722,11 @@ inline uint32_t Class::ComputeClassSize(bool has_embedded_vtable, // Space used by java.lang.Class and its instance fields. uint32_t size = sizeof(Class); // Space used by embedded tables. - if (has_embedded_vtable) { - size = RoundUp(size + sizeof(uint32_t), pointer_size); - size += pointer_size; // size of pointer to IMT - size += num_vtable_entries * VTableEntrySize(pointer_size); + if (has_embedded_tables) { + const uint32_t embedded_imt_size = kImtSize * ImTableEntrySize(pointer_size); + const uint32_t embedded_vtable_size = num_vtable_entries * VTableEntrySize(pointer_size); + size = RoundUp(size + sizeof(uint32_t) /* embedded vtable len */, pointer_size) + + embedded_imt_size + embedded_vtable_size; } // Space used by reference statics. @@ -978,9 +990,18 @@ inline IterationRange> Class::GetSFieldsUnchecked() { return MakeIterationRangeFromLengthPrefixedArray(GetSFieldsPtrUnchecked()); } +inline MemberOffset Class::EmbeddedImTableOffset(size_t pointer_size) { + CheckPointerSize(pointer_size); + // Round up since we want the embedded imt and vtable to be pointer size aligned in case 64 bits. + // Add 32 bits for embedded vtable length. 
+ return MemberOffset( + RoundUp(EmbeddedVTableLengthOffset().Uint32Value() + sizeof(uint32_t), pointer_size)); +} + inline MemberOffset Class::EmbeddedVTableOffset(size_t pointer_size) { CheckPointerSize(pointer_size); - return MemberOffset(ImtPtrOffset(pointer_size).Uint32Value() + pointer_size); + return MemberOffset(EmbeddedImTableOffset(pointer_size).Uint32Value() + + kImtSize * ImTableEntrySize(pointer_size)); } inline void Class::CheckPointerSize(size_t pointer_size) { @@ -1065,7 +1086,7 @@ inline void Class::FixupNativePointers(mirror::Class* dest, dest->SetDexCacheStrings(new_strings); } // Fix up embedded tables. - if (!IsTemp() && ShouldHaveEmbeddedVTable()) { + if (!IsTemp() && ShouldHaveEmbeddedImtAndVTable()) { for (int32_t i = 0, count = GetEmbeddedVTableLength(); i < count; ++i) { ArtMethod* method = GetEmbeddedVTableEntry(i, pointer_size); ArtMethod* new_method = visitor(method); @@ -1073,9 +1094,16 @@ inline void Class::FixupNativePointers(mirror::Class* dest, dest->SetEmbeddedVTableEntryUnchecked(i, new_method, pointer_size); } } - } - if (!IsTemp() && ShouldHaveImt()) { - dest->SetImt(visitor(GetImt(pointer_size)), pointer_size); + for (size_t i = 0; i < mirror::Class::kImtSize; ++i) { + ArtMethod* method = GetEmbeddedImTableEntry(i, + pointer_size); + ArtMethod* new_method = visitor(method); + if (method != new_method) { + dest->SetEmbeddedImTableEntry(i, + new_method, + pointer_size); + } + } } } diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc index 9c77d3814..b4a23badb 100644 --- a/runtime/mirror/class.cc +++ b/runtime/mirror/class.cc @@ -914,7 +914,13 @@ const DexFile::TypeList* Class::GetInterfaceTypeList() { return GetDexFile().GetInterfacesList(*class_def); } -void Class::PopulateEmbeddedVTable(size_t pointer_size) { +void Class::PopulateEmbeddedImtAndVTable(ArtMethod* const (&methods)[kImtSize], + size_t pointer_size) { + for (size_t i = 0; i < kImtSize; i++) { + auto method = methods[i]; + DCHECK(method != nullptr); + SetEmbeddedImTableEntry(i, method, pointer_size); + } PointerArray* table = GetVTableDuringLinking(); CHECK(table != nullptr) << PrettyClass(this); const size_t table_length = table->GetLength(); @@ -961,7 +967,7 @@ class ReadBarrierOnNativeRootsVisitor { class CopyClassVisitor { public: CopyClassVisitor(Thread* self, Handle* orig, size_t new_length, - size_t copy_bytes, ImTable* imt, + size_t copy_bytes, ArtMethod* const (&imt)[mirror::Class::kImtSize], size_t pointer_size) : self_(self), orig_(orig), new_length_(new_length), copy_bytes_(copy_bytes), imt_(imt), pointer_size_(pointer_size) { @@ -973,8 +979,7 @@ class CopyClassVisitor { Handle h_new_class_obj(hs.NewHandle(obj->AsClass())); mirror::Object::CopyObject(self_, h_new_class_obj.Get(), orig_->Get(), copy_bytes_); mirror::Class::SetStatus(h_new_class_obj, Class::kStatusResolving, self_); - h_new_class_obj->PopulateEmbeddedVTable(pointer_size_); - h_new_class_obj->SetImt(imt_, pointer_size_); + h_new_class_obj->PopulateEmbeddedImtAndVTable(imt_, pointer_size_); h_new_class_obj->SetClassSize(new_length_); // Visit all of the references to make sure there is no from space references in the native // roots. 
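For illustration only (not part of this patch): with the revert, the embedded vtable length, the kImtSize IMT slots, and the embedded vtable all live inside the Class object, and EmbeddedImTableOffset/EmbeddedVTableOffset/ComputeClassSize above describe that layout. A stand-alone sketch of the arithmetic, where sizeof(mirror::Class), kImtSize, and the vtable length are assumed example values (the real ones come from the runtime and the IMT_SIZE build macro):

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t RoundUp(uint32_t x, uint32_t n) { return (x + n - 1) / n * n; }

    constexpr uint32_t kAssumedClassHeaderSize = 120;  // stand-in for sizeof(mirror::Class)
    constexpr uint32_t kImtSize = 64;                  // assumed IMT_SIZE

    int main() {
      const uint32_t pointer_size = 8;        // 64-bit target
      const uint32_t num_vtable_entries = 11; // arbitrary example vtable length

      // Embedded vtable length (a uint32_t) sits right after the Class fields.
      const uint32_t vtable_len_offset = kAssumedClassHeaderSize;
      // IMT entries start at the next pointer-aligned offset.
      const uint32_t imt_offset = RoundUp(vtable_len_offset + sizeof(uint32_t), pointer_size);
      // The embedded vtable follows the kImtSize IMT slots (one pointer per slot).
      const uint32_t vtable_offset = imt_offset + kImtSize * pointer_size;
      // ComputeClassSize with embedded tables (static fields omitted here).
      const uint32_t class_size =
          RoundUp(kAssumedClassHeaderSize + sizeof(uint32_t), pointer_size) +
          kImtSize * pointer_size + num_vtable_entries * pointer_size;

      std::printf("imt @ %u, vtable @ %u, class size %u\n", imt_offset, vtable_offset, class_size);
      return 0;
    }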
@@ -987,13 +992,13 @@ class CopyClassVisitor {
   Handle* const orig_;
   const size_t new_length_;
   const size_t copy_bytes_;
-  ImTable* imt_;
+  ArtMethod* const (&imt_)[mirror::Class::kImtSize];
   const size_t pointer_size_;
   DISALLOW_COPY_AND_ASSIGN(CopyClassVisitor);
 };
 
 Class* Class::CopyOf(Thread* self, int32_t new_length,
-                     ImTable* imt, size_t pointer_size) {
+                     ArtMethod* const (&imt)[mirror::Class::kImtSize], size_t pointer_size) {
   DCHECK_GE(new_length, static_cast(sizeof(Class)));
   // We may get copied by a compacting GC.
   StackHandleScope<1> hs(self);
diff --git a/runtime/mirror/class.h b/runtime/mirror/class.h
index 9670accf5..5235a3e8d 100644
--- a/runtime/mirror/class.h
+++ b/runtime/mirror/class.h
@@ -22,7 +22,6 @@
 #include "class_flags.h"
 #include "gc_root.h"
 #include "gc/allocator_type.h"
-#include "imtable.h"
 #include "invoke_type.h"
 #include "modifiers.h"
 #include "object.h"
@@ -34,6 +33,10 @@
 #include "thread.h"
 #include "utils.h"
 
+#ifndef IMT_SIZE
+#error IMT_SIZE not defined
+#endif
+
 namespace art {
 
 class ArtField;
@@ -63,6 +66,11 @@ class MANAGED Class FINAL : public Object {
   // 2 ref instance fields.]
   static constexpr uint32_t kClassWalkSuper = 0xC0000000;
 
+  // Interface method table size. Increasing this value reduces the chance of two interface methods
+  // colliding in the interface method table but increases the size of classes that implement
+  // (non-marker) interfaces.
+  static constexpr size_t kImtSize = IMT_SIZE;
+
   // Class Status
   //
   // kStatusRetired: Class that's temporarily used till class linking time
@@ -343,7 +351,7 @@ class MANAGED Class FINAL : public Object {
   // be replaced with a class with the right size for embedded imt/vtable.
   bool IsTemp() SHARED_REQUIRES(Locks::mutator_lock_) {
     Status s = GetStatus();
-    return s < Status::kStatusResolving && ShouldHaveEmbeddedVTable();
+    return s < Status::kStatusResolving && ShouldHaveEmbeddedImtAndVTable();
   }
 
   String* GetName() SHARED_REQUIRES(Locks::mutator_lock_);  // Returns the cached name.
@@ -549,7 +557,7 @@ class MANAGED Class FINAL : public Object {
       SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Compute how many bytes would be used a class with the given elements.
-  static uint32_t ComputeClassSize(bool has_embedded_vtable,
+  static uint32_t ComputeClassSize(bool has_embedded_tables,
                                    uint32_t num_vtable_entries,
                                    uint32_t num_8bit_static_fields,
                                    uint32_t num_16bit_static_fields,
@@ -822,28 +830,28 @@ class MANAGED Class FINAL : public Object {
     return MemberOffset(sizeof(Class));
   }
 
-  static MemberOffset ImtPtrOffset(size_t pointer_size) {
-    return MemberOffset(
-        RoundUp(EmbeddedVTableLengthOffset().Uint32Value() + sizeof(uint32_t), pointer_size));
-  }
-  template
-  bool ShouldHaveImt() SHARED_REQUIRES(Locks::mutator_lock_) {
-    return ShouldHaveEmbeddedVTable() &&
-        GetIfTable() != nullptr && !IsArrayClass();
-  }
-
-  template
-  bool ShouldHaveEmbeddedVTable() SHARED_REQUIRES(Locks::mutator_lock_) {
+  bool ShouldHaveEmbeddedImtAndVTable() SHARED_REQUIRES(Locks::mutator_lock_) {
     return IsInstantiable();
   }
 
   bool HasVTable() SHARED_REQUIRES(Locks::mutator_lock_);
 
+  static MemberOffset EmbeddedImTableEntryOffset(uint32_t i, size_t pointer_size);
+  static MemberOffset EmbeddedVTableEntryOffset(uint32_t i, size_t pointer_size);
+
+  template
+  ArtMethod* GetEmbeddedImTableEntry(uint32_t i, size_t pointer_size)
+      SHARED_REQUIRES(Locks::mutator_lock_);
+
+  template
+  void SetEmbeddedImTableEntry(uint32_t i, ArtMethod* method, size_t pointer_size)
+      SHARED_REQUIRES(Locks::mutator_lock_);
+
   int32_t GetVTableLength() SHARED_REQUIRES(Locks::mutator_lock_);
 
   ArtMethod* GetVTableEntry(uint32_t i, size_t pointer_size)
@@ -853,10 +861,6 @@ class MANAGED Class FINAL : public Object {
 
   void SetEmbeddedVTableLength(int32_t len) SHARED_REQUIRES(Locks::mutator_lock_);
 
-  ImTable* GetImt(size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
-
-  void SetImt(ImTable* imt, size_t pointer_size) SHARED_REQUIRES(Locks::mutator_lock_);
-
   ArtMethod* GetEmbeddedVTableEntry(uint32_t i, size_t pointer_size)
       SHARED_REQUIRES(Locks::mutator_lock_);
@@ -866,7 +870,7 @@ class MANAGED Class FINAL : public Object {
   inline void SetEmbeddedVTableEntryUnchecked(uint32_t i, ArtMethod* method, size_t pointer_size)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
-  void PopulateEmbeddedVTable(size_t pointer_size)
+  void PopulateEmbeddedImtAndVTable(ArtMethod* const (&methods)[kImtSize], size_t pointer_size)
      SHARED_REQUIRES(Locks::mutator_lock_);
 
   // Given a method implemented by this class but potentially from a super class, return the
@@ -1191,7 +1195,7 @@ class MANAGED Class FINAL : public Object {
   void AssertInitializedOrInitializingInThread(Thread* self)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
-  Class* CopyOf(Thread* self, int32_t new_length, ImTable* imt,
+  Class* CopyOf(Thread* self, int32_t new_length, ArtMethod* const (&imt)[mirror::Class::kImtSize],
                 size_t pointer_size)
       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Roles::uninterruptible_);
 
@@ -1318,7 +1322,7 @@ class MANAGED Class FINAL : public Object {
 
   // Check that the pointer size matches the one in the class linker.
   ALWAYS_INLINE static void CheckPointerSize(size_t pointer_size);
+
+  static MemberOffset EmbeddedImTableOffset(size_t pointer_size);
   static MemberOffset EmbeddedVTableOffset(size_t pointer_size);
+  template
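Taken together, the class.h and class-inl.h hunks above restore a per-class layout of [Class fields][embedded vtable length][embedded IMT of kImtSize slots][embedded vtable][statics]. The following standalone C++ sketch mirrors that offset arithmetic; kPointerSize, kImtSize and kClassHeaderSize are placeholder assumptions for illustration, not values taken from the patch.

    #include <cstddef>
    #include <cstdint>

    // Round a size up to a power-of-two alignment.
    constexpr size_t RoundUpTo(size_t value, size_t alignment) {
      return (value + alignment - 1) & ~(alignment - 1);
    }

    constexpr size_t kPointerSize = 8;        // Assumed 64-bit target.
    constexpr size_t kImtSize = 64;           // Assumed IMT_SIZE build constant.
    constexpr size_t kClassHeaderSize = 128;  // Stand-in for sizeof(mirror::Class).

    // The vtable length (a uint32) sits right after the Class fields; the IMT starts
    // at the next pointer-aligned offset, followed by the vtable entries.
    constexpr size_t EmbeddedImTableOffset() {
      return RoundUpTo(kClassHeaderSize + sizeof(uint32_t), kPointerSize);
    }

    constexpr size_t EmbeddedVTableOffset() {
      return EmbeddedImTableOffset() + kImtSize * kPointerSize;
    }

    // Class size before static fields, matching the embedded-tables branch of ComputeClassSize.
    constexpr size_t ClassSizeWithoutStatics(size_t num_vtable_entries) {
      return EmbeddedVTableOffset() + num_vtable_entries * kPointerSize;
    }

    static_assert(ClassSizeWithoutStatics(11) ==
                      EmbeddedImTableOffset() + (kImtSize + 11) * kPointerSize,
                  "embedded IMT and vtable are laid out back to back");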