Change type initialization entrypoints to kSaveEverything.
author    Vladimir Marko <vmarko@google.com>
          Mon, 6 Feb 2017 19:59:33 +0000 (19:59 +0000)
committer Vladimir Marko <vmarko@google.com>
          Wed, 8 Feb 2017 21:24:01 +0000 (21:24 +0000)
Also avoid the unnecessary read barriers for boot image
classes with kBssEntry or kJitTableAddress (the kBssEntry
and JIT work missed the `read_barrier_option` flag), fix
bit-rotten non-Baker read barriers on ARM and ARM64, and
fix the bit-rotten ARM64 relative patcher's IsAdrpPatch()
used for the erratum 843419 workaround.

aosp_angler-userdebug with CC:
  before:
    arm boot*.oat: 35440420
    arm64 boot*.oat: 43504952
  after:
    arm boot*.oat: 35222292 (-218128, -0.62%)
    arm64 boot*.oat: 43389048 (-115904, -0.26%)

aosp_angler-userdebug without CC:
  before:
    arm boot*.oat: 31927412
    arm64 boot*.oat: 39340512
  after:
    arm boot*.oat: 31708736 (-218676, -0.68%)
    arm64 boot*.oat: 39211768 (-128744, -0.33%)

Test: m test-art-host (non-CC, Baker CC, table lookup CC)
Test: m test-art-target on Nexus 6P (non-CC, Baker CC, table lookup CC)
Test: Nexus 6P boots (non-CC, Baker CC, table lookup CC)
Bug: 30627598
Change-Id: Ida5bbce414844de9e4273e40334165d4494230d4
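
For context, below is a minimal stand-alone sketch (a hypothetical C++
analogy, not ART code; BssSlotFor() and ResolveType() are invented
stand-ins) of the .bss-entry caching contract that kSaveEverything
enables: because a save-everything entrypoint preserves all registers
except the return register, the fast path's pre-computed entry address
can stay live in a temp across the runtime call, so the slow path can
store the result without re-materializing the address (the movw/movt+PC
or ADRP sequences seen in the diffs below).

  #include <cstdint>
  #include <unordered_map>

  // Stand-in ".bss" slots, one per type index (assumption for illustration).
  static std::unordered_map<uint32_t, const void*> bss_slots;

  // Stand-in for the PC-relative address materialization (movw/movt+PC, ADRP).
  const void** BssSlotFor(uint32_t type_index) {
    return &bss_slots[type_index];
  }

  // Stand-in for the runtime entrypoint; with kSaveEverything it clobbers
  // only the return register, so callers may keep temps live across it.
  const void* ResolveType(uint32_t type_index) {
    static const int klass = 0;
    return &klass;  // pretend the runtime resolved the class
  }

  const void* LoadClassBssEntry(uint32_t type_index) {
    const void** entry_address = BssSlotFor(type_index);  // fast path: slot address
    const void* klass = *entry_address;                   // fast path: cached load
    if (klass == nullptr) {                               // miss: take the slow path
      klass = ResolveType(type_index);  // entry_address survives the call
      *entry_address = klass;           // store back without recomputing
    }
    return klass;
  }

Before this change, these entrypoints did not save everything, so the
slow paths had to re-compute the entry address after the call (the
removed movw/movt/add and ADRP sequences in the hunks below).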

compiler/linker/arm64/relative_patcher_arm64.cc
compiler/optimizing/code_generator_arm.cc
compiler/optimizing/code_generator_arm64.cc
compiler/optimizing/code_generator_arm_vixl.cc
compiler/optimizing/code_generator_x86.cc
compiler/optimizing/code_generator_x86_64.cc
runtime/arch/arm/quick_entrypoints_arm.S
runtime/arch/arm64/quick_entrypoints_arm64.S
runtime/arch/x86/quick_entrypoints_x86.S
runtime/arch/x86_64/quick_entrypoints_x86_64.S
runtime/entrypoints/quick/quick_dexcache_entrypoints.cc

diff --git a/compiler/linker/arm64/relative_patcher_arm64.cc b/compiler/linker/arm64/relative_patcher_arm64.cc
index 79e1785..9ddf200 100644
@@ -31,9 +31,7 @@ namespace linker {
 namespace {
 
 inline bool IsAdrpPatch(const LinkerPatch& patch) {
-  LinkerPatch::Type type = patch.GetType();
-  return
-      (type == LinkerPatch::Type::kStringRelative || type == LinkerPatch::Type::kDexCacheArray) &&
+  return (patch.IsPcRelative() && patch.GetType() != LinkerPatch::Type::kCallRelative) &&
       patch.LiteralOffset() == patch.PcInsnOffset();
 }
 
@@ -214,11 +212,11 @@ void Arm64RelativePatcher::PatchPcRelativeReference(std::vector<uint8_t>* code,
         DCHECK(patch.GetType() == LinkerPatch::Type::kStringRelative ||
                patch.GetType() == LinkerPatch::Type::kTypeRelative) << patch.GetType();
       } else {
-        // With the read barrier (non-Baker) enabled, it could be kDexCacheArray in the
-        // HLoadString::LoadKind::kDexCachePcRelative case of VisitLoadString().
+        // With the read barrier (non-Baker) enabled, it could be kStringBssEntry or kTypeBssEntry.
         DCHECK(patch.GetType() == LinkerPatch::Type::kStringRelative ||
                patch.GetType() == LinkerPatch::Type::kTypeRelative ||
-               patch.GetType() == LinkerPatch::Type::kDexCacheArray) << patch.GetType();
+               patch.GetType() == LinkerPatch::Type::kStringBssEntry ||
+               patch.GetType() == LinkerPatch::Type::kTypeBssEntry) << patch.GetType();
       }
       shift = 0u;  // No shift for ADD.
     } else {
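
The rewritten IsAdrpPatch() now covers every PC-relative patch other than
a relative call, instead of hard-coding two patch types. Restated with
stub types (a hypothetical sketch, not the real LinkerPatch API), the
predicate the erratum 843419 workaround scans for is:

  #include <cstdint>

  enum class PatchType { kCallRelative, kStringRelative, kTypeRelative,
                         kStringBssEntry, kTypeBssEntry };  // stub subset

  struct Patch {                // stub stand-in for LinkerPatch
    PatchType type;
    uint32_t literal_offset;    // offset of the patched instruction
    uint32_t pc_insn_offset;    // offset of the ADRP that forms the PC base
    // In the real API, relative calls are PC-relative too, hence the
    // explicit exclusion in IsAdrpPatch() below.
    bool IsPcRelative() const { return true; }
  };

  inline bool IsAdrpPatch(const Patch& patch) {
    return patch.IsPcRelative() &&
           patch.type != PatchType::kCallRelative &&
           // The patch sits on the ADRP itself, not on the dependent ADD/LDR.
           patch.literal_offset == patch.pc_insn_offset;
  }

The old check listed only kStringRelative and kDexCacheArray, so ADRP
patches for the newer kStringBssEntry/kTypeBssEntry entries would have
been skipped by the erratum scan; matching on "PC-relative and not a
call" keeps the predicate correct as patch types are added.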
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 20cdae3..06e164f 100644
@@ -367,22 +367,37 @@ class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 
 class LoadClassSlowPathARM : public SlowPathCodeARM {
  public:
-  LoadClassSlowPathARM(HLoadClass* cls,
-                       HInstruction* at,
-                       uint32_t dex_pc,
-                       bool do_clinit)
+  LoadClassSlowPathARM(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
       : SlowPathCodeARM(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
     DCHECK(at->IsLoadClass() || at->IsClinitCheck());
   }
 
   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
     LocationSummary* locations = instruction_->GetLocations();
+    Location out = locations->Out();
+    constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);
 
     CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
     __ Bind(GetEntryLabel());
     SaveLiveRegisters(codegen, locations);
 
     InvokeRuntimeCallingConvention calling_convention;
+    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
+    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+    bool is_load_class_bss_entry =
+        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
+    Register entry_address = kNoRegister;
+    if (is_load_class_bss_entry && call_saves_everything_except_r0) {
+      Register temp = locations->GetTemp(0).AsRegister<Register>();
+      // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
+      // the kSaveEverything call.
+      bool temp_is_r0 = (temp == calling_convention.GetRegisterAt(0));
+      entry_address = temp_is_r0 ? out.AsRegister<Register>() : temp;
+      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
+      if (temp_is_r0) {
+        __ mov(entry_address, ShifterOperand(temp));
+      }
+    }
     dex::TypeIndex type_index = cls_->GetTypeIndex();
     __ LoadImmediate(calling_convention.GetRegisterAt(0), type_index.index_);
     QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
@@ -394,30 +409,31 @@ class LoadClassSlowPathARM : public SlowPathCodeARM {
       CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
     }
 
+    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+    if (is_load_class_bss_entry) {
+      if (call_saves_everything_except_r0) {
+        // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
+        __ str(R0, Address(entry_address));
+      } else {
+        // For non-Baker read barrier, we need to re-calculate the address of the type entry.
+        Register temp = IP;
+        CodeGeneratorARM::PcRelativePatchInfo* labels =
+            arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
+        __ BindTrackedLabel(&labels->movw_label);
+        __ movw(temp, /* placeholder */ 0u);
+        __ BindTrackedLabel(&labels->movt_label);
+        __ movt(temp, /* placeholder */ 0u);
+        __ BindTrackedLabel(&labels->add_pc_label);
+        __ add(temp, temp, ShifterOperand(PC));
+        __ str(R0, Address(temp));
+      }
+    }
     // Move the class to the desired location.
-    Location out = locations->Out();
     if (out.IsValid()) {
       DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
       arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
     }
     RestoreLiveRegisters(codegen, locations);
-    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
-    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
-    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
-      DCHECK(out.IsValid());
-      // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
-      // kSaveEverything and use a temporary for the .bss entry address in the fast path,
-      // so that we can avoid another calculation here.
-      CodeGeneratorARM::PcRelativePatchInfo* labels =
-          arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
-      __ BindTrackedLabel(&labels->movw_label);
-      __ movw(IP, /* placeholder */ 0u);
-      __ BindTrackedLabel(&labels->movt_label);
-      __ movt(IP, /* placeholder */ 0u);
-      __ BindTrackedLabel(&labels->add_pc_label);
-      __ add(IP, IP, ShifterOperand(PC));
-      __ str(locations->Out().AsRegister<Register>(), Address(IP));
-    }
     __ b(GetExitLabel());
   }
 
@@ -441,12 +457,13 @@ class LoadStringSlowPathARM : public SlowPathCodeARM {
   explicit LoadStringSlowPathARM(HLoadString* instruction) : SlowPathCodeARM(instruction) {}
 
   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    DCHECK(instruction_->IsLoadString());
+    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
     LocationSummary* locations = instruction_->GetLocations();
     DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
     HLoadString* load = instruction_->AsLoadString();
     const dex::StringIndex string_index = load->GetStringIndex();
     Register out = locations->Out().AsRegister<Register>();
-    Register temp = locations->GetTemp(0).AsRegister<Register>();
     constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);
 
     CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
@@ -455,12 +472,16 @@ class LoadStringSlowPathARM : public SlowPathCodeARM {
 
     InvokeRuntimeCallingConvention calling_convention;
     // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
-    // the kSaveEverything call (or use `out` for the address after non-kSaveEverything call).
-    bool temp_is_r0 = (temp == calling_convention.GetRegisterAt(0));
-    Register entry_address = temp_is_r0 ? out : temp;
-    DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
-    if (call_saves_everything_except_r0 && temp_is_r0) {
-      __ mov(entry_address, ShifterOperand(temp));
+    // the kSaveEverything call.
+    Register entry_address = kNoRegister;
+    if (call_saves_everything_except_r0) {
+      Register temp = locations->GetTemp(0).AsRegister<Register>();
+      bool temp_is_r0 = (temp == calling_convention.GetRegisterAt(0));
+      entry_address = temp_is_r0 ? out : temp;
+      DCHECK_NE(entry_address, calling_convention.GetRegisterAt(0));
+      if (temp_is_r0) {
+        __ mov(entry_address, ShifterOperand(temp));
+      }
     }
 
     __ LoadImmediate(calling_convention.GetRegisterAt(0), string_index.index_);
@@ -473,15 +494,16 @@ class LoadStringSlowPathARM : public SlowPathCodeARM {
       __ str(R0, Address(entry_address));
     } else {
       // For non-Baker read barrier, we need to re-calculate the address of the string entry.
+      Register temp = IP;
       CodeGeneratorARM::PcRelativePatchInfo* labels =
           arm_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
       __ BindTrackedLabel(&labels->movw_label);
-      __ movw(entry_address, /* placeholder */ 0u);
+      __ movw(temp, /* placeholder */ 0u);
       __ BindTrackedLabel(&labels->movt_label);
-      __ movt(entry_address, /* placeholder */ 0u);
+      __ movt(temp, /* placeholder */ 0u);
       __ BindTrackedLabel(&labels->add_pc_label);
-      __ add(entry_address, entry_address, ShifterOperand(PC));
-      __ str(R0, Address(entry_address));
+      __ add(temp, temp, ShifterOperand(PC));
+      __ str(R0, Address(temp));
     }
 
     arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
@@ -5755,6 +5777,7 @@ void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
         cls,
         Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
         Location::RegisterLocation(R0));
+    DCHECK_EQ(calling_convention.GetRegisterAt(0), R0);
     return;
   }
   DCHECK(!cls->NeedsAccessCheck());
@@ -5772,6 +5795,22 @@ void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
   locations->SetOut(Location::RequiresRegister());
+  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
+    if (!kUseReadBarrier || kUseBakerReadBarrier) {
+      // Rely on the type resolution or initialization and marking to save everything we need.
+      // Note that IP may be clobbered by saving/restoring the live register (only one thanks
+      // to the custom calling convention) or by marking, so we request a different temp.
+      locations->AddTemp(Location::RequiresRegister());
+      RegisterSet caller_saves = RegisterSet::Empty();
+      InvokeRuntimeCallingConvention calling_convention;
+      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+      // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
+      // that the kPrimNot result register is the same as the first argument register.
+      locations->SetCustomSlowPathCallerSaves(caller_saves);
+    } else {
+      // For non-Baker read barrier we have a temp-clobbering call.
+    }
+  }
 }
 
 // NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
@@ -5834,15 +5873,18 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFE
       break;
     }
     case HLoadClass::LoadKind::kBssEntry: {
+      Register temp = (!kUseReadBarrier || kUseBakerReadBarrier)
+          ? locations->GetTemp(0).AsRegister<Register>()
+          : out;
       CodeGeneratorARM::PcRelativePatchInfo* labels =
           codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
       __ BindTrackedLabel(&labels->movw_label);
-      __ movw(out, /* placeholder */ 0u);
+      __ movw(temp, /* placeholder */ 0u);
       __ BindTrackedLabel(&labels->movt_label);
-      __ movt(out, /* placeholder */ 0u);
+      __ movt(temp, /* placeholder */ 0u);
       __ BindTrackedLabel(&labels->add_pc_label);
-      __ add(out, out, ShifterOperand(PC));
-      GenerateGcRootFieldLoad(cls, out_loc, out, 0, kCompilerReadBarrierOption);
+      __ add(temp, temp, ShifterOperand(PC));
+      GenerateGcRootFieldLoad(cls, out_loc, temp, /* offset */ 0, read_barrier_option);
       generate_null_check = true;
       break;
     }
@@ -5851,7 +5893,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFE
                                                                cls->GetTypeIndex(),
                                                                cls->GetClass()));
       // /* GcRoot<mirror::Class> */ out = *out
-      GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
+      GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, read_barrier_option);
       break;
     }
     case HLoadClass::LoadKind::kDexCacheViaMethod:
@@ -5938,9 +5980,9 @@ void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
     locations->SetOut(Location::RequiresRegister());
     if (load_kind == HLoadString::LoadKind::kBssEntry) {
       if (!kUseReadBarrier || kUseBakerReadBarrier) {
-        // Rely on the pResolveString and/or marking to save everything, including temps.
-        // Note that IP may theoretically be clobbered by saving/restoring the live register
-        // (only one thanks to the custom calling convention), so we request a different temp.
+        // Rely on the pResolveString and marking to save everything we need, including temps.
+        // Note that IP may be clobbered by saving/restoring the live register (only one thanks
+        // to the custom calling convention) or by marking, so we request a different temp.
         locations->AddTemp(Location::RequiresRegister());
         RegisterSet caller_saves = RegisterSet::Empty();
         InvokeRuntimeCallingConvention calling_convention;
@@ -5991,7 +6033,9 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) NO_THREAD_S
     }
     case HLoadString::LoadKind::kBssEntry: {
       DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
-      Register temp = locations->GetTemp(0).AsRegister<Register>();
+      Register temp = (!kUseReadBarrier || kUseBakerReadBarrier)
+          ? locations->GetTemp(0).AsRegister<Register>()
+          : out;
       CodeGeneratorARM::PcRelativePatchInfo* labels =
           codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
       __ BindTrackedLabel(&labels->movw_label);
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 598be47..248cee1 100644
@@ -275,15 +275,37 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
   LoadClassSlowPathARM64(HLoadClass* cls,
                          HInstruction* at,
                          uint32_t dex_pc,
-                         bool do_clinit)
-      : SlowPathCodeARM64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+                         bool do_clinit,
+                         vixl::aarch64::Register bss_entry_temp = vixl::aarch64::Register(),
+                         vixl::aarch64::Label* bss_entry_adrp_label = nullptr)
+      : SlowPathCodeARM64(at),
+        cls_(cls),
+        dex_pc_(dex_pc),
+        do_clinit_(do_clinit),
+        bss_entry_temp_(bss_entry_temp),
+        bss_entry_adrp_label_(bss_entry_adrp_label) {
     DCHECK(at->IsLoadClass() || at->IsClinitCheck());
   }
 
   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
     LocationSummary* locations = instruction_->GetLocations();
+    Location out = locations->Out();
+    constexpr bool call_saves_everything_except_r0_ip0 = (!kUseReadBarrier || kUseBakerReadBarrier);
     CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
 
+    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the page address of
+    // the entry which is in a scratch register. Make sure it's not used for saving/restoring
+    // registers. Exclude the scratch register also for non-Baker read barrier for simplicity.
+    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+    bool is_load_class_bss_entry =
+        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
+    UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
+    if (is_load_class_bss_entry) {
+      // This temp is a scratch register.
+      DCHECK(bss_entry_temp_.IsValid());
+      temps.Exclude(bss_entry_temp_);
+    }
+
     __ Bind(GetEntryLabel());
     SaveLiveRegisters(codegen, locations);
 
@@ -300,7 +322,6 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
     }
 
     // Move the class to the desired location.
-    Location out = locations->Out();
     if (out.IsValid()) {
       DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
       Primitive::Type type = instruction_->GetType();
@@ -308,25 +329,23 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
     }
     RestoreLiveRegisters(codegen, locations);
     // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
-    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
-    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+    if (is_load_class_bss_entry) {
       DCHECK(out.IsValid());
-      UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
-      Register temp = temps.AcquireX();
       const DexFile& dex_file = cls_->GetDexFile();
-      // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
-      // kSaveEverything and use a temporary for the ADRP in the fast path, so that we
-      // can avoid the ADRP here.
-      vixl::aarch64::Label* adrp_label =
-          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
-      arm64_codegen->EmitAdrpPlaceholder(adrp_label, temp);
+      if (call_saves_everything_except_r0_ip0) {
+        // The class entry page address was preserved in bss_entry_temp_ thanks to kSaveEverything.
+      } else {
+        // For non-Baker read barrier, we need to re-calculate the address of the class entry page.
+        bss_entry_adrp_label_ = arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
+        arm64_codegen->EmitAdrpPlaceholder(bss_entry_adrp_label_, bss_entry_temp_);
+      }
       vixl::aarch64::Label* strp_label =
-          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
+          arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label_);
       {
         SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
         __ Bind(strp_label);
         __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
-               MemOperand(temp, /* offset placeholder */ 0));
+               MemOperand(bss_entry_temp_, /* offset placeholder */ 0));
       }
     }
     __ B(GetExitLabel());
@@ -344,6 +363,10 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
   // Whether to initialize the class.
   const bool do_clinit_;
 
+  // For HLoadClass/kBssEntry, the temp register and the label of the ADRP where it was loaded.
+  vixl::aarch64::Register bss_entry_temp_;
+  vixl::aarch64::Label* bss_entry_adrp_label_;
+
   DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64);
 };
 
@@ -4393,6 +4416,7 @@ void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
         cls,
         LocationFrom(calling_convention.GetRegisterAt(0)),
         LocationFrom(vixl::aarch64::x0));
+    DCHECK(calling_convention.GetRegisterAt(0).Is(vixl::aarch64::x0));
     return;
   }
   DCHECK(!cls->NeedsAccessCheck());
@@ -4410,6 +4434,22 @@ void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
   locations->SetOut(Location::RequiresRegister());
+  if (cls->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+    if (!kUseReadBarrier || kUseBakerReadBarrier) {
+      // Rely on the type resolution or initialization and marking to save everything we need.
+      // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
+      // to the custom calling convention) or by marking, so we shall use IP1.
+      RegisterSet caller_saves = RegisterSet::Empty();
+      InvokeRuntimeCallingConvention calling_convention;
+      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
+      DCHECK_EQ(calling_convention.GetRegisterAt(0).GetCode(),
+                RegisterFrom(calling_convention.GetReturnLocation(Primitive::kPrimNot),
+                             Primitive::kPrimNot).GetCode());
+      locations->SetCustomSlowPathCallerSaves(caller_saves);
+    } else {
+      // For non-Baker read barrier we have a temp-clobbering call.
+    }
+  }
 }
 
 // NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
@@ -4424,6 +4464,8 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SA
 
   Location out_loc = cls->GetLocations()->Out();
   Register out = OutputRegister(cls);
+  Register bss_entry_temp;
+  vixl::aarch64::Label* bss_entry_adrp_label = nullptr;
 
   const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
       ? kWithoutReadBarrier
@@ -4473,18 +4515,23 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SA
       // Add ADRP with its PC-relative Class .bss entry patch.
       const DexFile& dex_file = cls->GetDexFile();
       dex::TypeIndex type_index = cls->GetTypeIndex();
-      vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
-      codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
+      // We can go to slow path even with non-zero reference and in that case marking
+      // can clobber IP0, so we need to use IP1 which shall be preserved.
+      bss_entry_temp = ip1;
+      UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
+      temps.Exclude(bss_entry_temp);
+      bss_entry_adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
+      codegen_->EmitAdrpPlaceholder(bss_entry_adrp_label, bss_entry_temp);
       // Add LDR with its PC-relative Class patch.
       vixl::aarch64::Label* ldr_label =
-          codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
+          codegen_->NewBssEntryTypePatch(dex_file, type_index, bss_entry_adrp_label);
       // /* GcRoot<mirror::Class> */ out = *(base_address + offset)  /* PC-relative */
       GenerateGcRootFieldLoad(cls,
-                              cls->GetLocations()->Out(),
-                              out.X(),
-                              /* placeholder */ 0u,
+                              out_loc,
+                              bss_entry_temp,
+                              /* offset placeholder */ 0u,
                               ldr_label,
-                              kCompilerReadBarrierOption);
+                              read_barrier_option);
       generate_null_check = true;
       break;
     }
@@ -4497,7 +4544,7 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SA
                               out.X(),
                               /* offset */ 0,
                               /* fixup_label */ nullptr,
-                              kCompilerReadBarrierOption);
+                              read_barrier_option);
       break;
     }
     case HLoadClass::LoadKind::kDexCacheViaMethod:
@@ -4506,10 +4553,11 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SA
       UNREACHABLE();
   }
 
-  if (generate_null_check || cls->MustGenerateClinitCheck()) {
+  bool do_clinit = cls->MustGenerateClinitCheck();
+  if (generate_null_check || do_clinit) {
     DCHECK(cls->CanCallRuntime());
     SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM64(
-        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
+        cls, cls, cls->GetDexPc(), do_clinit, bss_entry_temp, bss_entry_adrp_label);
     codegen_->AddSlowPath(slow_path);
     if (generate_null_check) {
       __ Cbz(out, slow_path->GetEntryLabel());
@@ -4577,7 +4625,9 @@ void LocationsBuilderARM64::VisitLoadString(HLoadString* load) {
     locations->SetOut(Location::RequiresRegister());
     if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
       if (!kUseReadBarrier || kUseBakerReadBarrier) {
-        // Rely on the pResolveString and/or marking to save everything, including temps.
+        // Rely on the pResolveString and marking to save everything we need.
+        // Note that IP0 may be clobbered by saving/restoring the live register (only one thanks
+        // to the custom calling convention) or by marking, so we shall use IP1.
         RegisterSet caller_saves = RegisterSet::Empty();
         InvokeRuntimeCallingConvention calling_convention;
         caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0).GetCode()));
@@ -4628,8 +4678,11 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD
       const DexFile& dex_file = load->GetDexFile();
       const dex::StringIndex string_index = load->GetStringIndex();
       DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+      // We could use IP0 as the marking shall not clobber IP0 if the reference is null and
+      // that's when we need the slow path. But let's not rely on such details and use IP1.
+      Register temp = ip1;
       UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
-      Register temp = temps.AcquireX();
+      temps.Exclude(temp);
       vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
       codegen_->EmitAdrpPlaceholder(adrp_label, temp);
       // Add LDR with its PC-relative String patch.
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index e189608..5c4ca5b 100644
@@ -400,12 +400,30 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 
   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
     LocationSummary* locations = instruction_->GetLocations();
+    Location out = locations->Out();
+    constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);
 
     CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
     __ Bind(GetEntryLabel());
     SaveLiveRegisters(codegen, locations);
 
     InvokeRuntimeCallingConventionARMVIXL calling_convention;
+    // For HLoadClass/kBssEntry/kSaveEverything, make sure we preserve the address of the entry.
+    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+    bool is_load_class_bss_entry =
+        (cls_ == instruction_) && (cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry);
+    vixl32::Register entry_address;
+    if (is_load_class_bss_entry && call_saves_everything_except_r0) {
+      vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
+      // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
+      // the kSaveEverything call.
+      bool temp_is_r0 = temp.Is(calling_convention.GetRegisterAt(0));
+      entry_address = temp_is_r0 ? RegisterFrom(out) : temp;
+      DCHECK(!entry_address.Is(calling_convention.GetRegisterAt(0)));
+      if (temp_is_r0) {
+        __ Mov(entry_address, temp);
+      }
+    }
     dex::TypeIndex type_index = cls_->GetTypeIndex();
     __ Mov(calling_convention.GetRegisterAt(0), type_index.index_);
     QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
@@ -417,27 +435,28 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
       CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
     }
 
+    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+    if (is_load_class_bss_entry) {
+      if (call_saves_everything_except_r0) {
+        // The class entry address was preserved in `entry_address` thanks to kSaveEverything.
+        __ Str(r0, MemOperand(entry_address));
+      } else {
+        // For non-Baker read barrier, we need to re-calculate the address of the type entry.
+        UseScratchRegisterScope temps(
+            down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
+        vixl32::Register temp = temps.Acquire();
+        CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+            arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
+        arm_codegen->EmitMovwMovtPlaceholder(labels, temp);
+        __ Str(r0, MemOperand(temp));
+      }
+    }
     // Move the class to the desired location.
-    Location out = locations->Out();
     if (out.IsValid()) {
       DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
       arm_codegen->Move32(locations->Out(), LocationFrom(r0));
     }
     RestoreLiveRegisters(codegen, locations);
-    // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
-    DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
-    if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
-      DCHECK(out.IsValid());
-      // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
-      // kSaveEverything and use a temporary for the .bss entry address in the fast path,
-      // so that we can avoid another calculation here.
-      UseScratchRegisterScope temps(down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
-      vixl32::Register temp = temps.Acquire();
-      CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
-          arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
-      arm_codegen->EmitMovwMovtPlaceholder(labels, temp);
-      __ Str(OutputRegister(cls_), MemOperand(temp));
-    }
     __ B(GetExitLabel());
   }
 
@@ -462,12 +481,13 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
       : SlowPathCodeARMVIXL(instruction) {}
 
   void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
+    DCHECK(instruction_->IsLoadString());
+    DCHECK_EQ(instruction_->AsLoadString()->GetLoadKind(), HLoadString::LoadKind::kBssEntry);
     LocationSummary* locations = instruction_->GetLocations();
     DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
     HLoadString* load = instruction_->AsLoadString();
     const dex::StringIndex string_index = load->GetStringIndex();
     vixl32::Register out = OutputRegister(load);
-    vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
     constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);
 
     CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
@@ -476,12 +496,16 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
 
     InvokeRuntimeCallingConventionARMVIXL calling_convention;
     // In the unlucky case that the `temp` is R0, we preserve the address in `out` across
-    // the kSaveEverything call (or use `out` for the address after non-kSaveEverything call).
-    bool temp_is_r0 = (temp.Is(calling_convention.GetRegisterAt(0)));
-    vixl32::Register entry_address = temp_is_r0 ? out : temp;
-    DCHECK(!entry_address.Is(calling_convention.GetRegisterAt(0)));
-    if (call_saves_everything_except_r0 && temp_is_r0) {
-      __ Mov(entry_address, temp);
+    // the kSaveEverything call.
+    vixl32::Register entry_address;
+    if (call_saves_everything_except_r0) {
+      vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
+      bool temp_is_r0 = (temp.Is(calling_convention.GetRegisterAt(0)));
+      entry_address = temp_is_r0 ? out : temp;
+      DCHECK(!entry_address.Is(calling_convention.GetRegisterAt(0)));
+      if (temp_is_r0) {
+        __ Mov(entry_address, temp);
+      }
     }
 
     __ Mov(calling_convention.GetRegisterAt(0), string_index.index_);
@@ -494,10 +518,13 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
       __ Str(r0, MemOperand(entry_address));
     } else {
       // For non-Baker read barrier, we need to re-calculate the address of the string entry.
+      UseScratchRegisterScope temps(
+          down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
+      vixl32::Register temp = temps.Acquire();
       CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
           arm_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
-      arm_codegen->EmitMovwMovtPlaceholder(labels, out);
-      __ Str(r0, MemOperand(entry_address));
+      arm_codegen->EmitMovwMovtPlaceholder(labels, temp);
+      __ Str(r0, MemOperand(temp));
     }
 
     arm_codegen->Move32(locations->Out(), LocationFrom(r0));
@@ -5832,6 +5859,7 @@ void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
         cls,
         LocationFrom(calling_convention.GetRegisterAt(0)),
         LocationFrom(r0));
+    DCHECK(calling_convention.GetRegisterAt(0).Is(r0));
     return;
   }
   DCHECK(!cls->NeedsAccessCheck());
@@ -5849,6 +5877,22 @@ void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
   locations->SetOut(Location::RequiresRegister());
+  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
+    if (!kUseReadBarrier || kUseBakerReadBarrier) {
+      // Rely on the type resolution or initialization and marking to save everything we need.
+      // Note that IP may be clobbered by saving/restoring the live register (only one thanks
+      // to the custom calling convention) or by marking, so we request a different temp.
+      locations->AddTemp(Location::RequiresRegister());
+      RegisterSet caller_saves = RegisterSet::Empty();
+      InvokeRuntimeCallingConventionARMVIXL calling_convention;
+      caller_saves.Add(LocationFrom(calling_convention.GetRegisterAt(0)));
+      // TODO: Add GetReturnLocation() to the calling convention so that we can DCHECK()
+      // that the kPrimNot result register is the same as the first argument register.
+      locations->SetCustomSlowPathCallerSaves(caller_saves);
+    } else {
+      // For non-Baker read barrier we have a temp-clobbering call.
+    }
+  }
 }
 
 // NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
@@ -5906,10 +5950,13 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_
       break;
     }
     case HLoadClass::LoadKind::kBssEntry: {
+      vixl32::Register temp = (!kUseReadBarrier || kUseBakerReadBarrier)
+          ? RegisterFrom(locations->GetTemp(0))
+          : out;
       CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
           codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
-      codegen_->EmitMovwMovtPlaceholder(labels, out);
-      GenerateGcRootFieldLoad(cls, out_loc, out, 0, kCompilerReadBarrierOption);
+      codegen_->EmitMovwMovtPlaceholder(labels, temp);
+      GenerateGcRootFieldLoad(cls, out_loc, temp, /* offset */ 0, read_barrier_option);
       generate_null_check = true;
       break;
     }
@@ -5918,7 +5965,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_
                                                        cls->GetTypeIndex(),
                                                        cls->GetClass()));
       // /* GcRoot<mirror::Class> */ out = *out
-      GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
+      GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, read_barrier_option);
       break;
     }
     case HLoadClass::LoadKind::kDexCacheViaMethod:
@@ -6012,9 +6059,9 @@ void LocationsBuilderARMVIXL::VisitLoadString(HLoadString* load) {
     locations->SetOut(Location::RequiresRegister());
     if (load_kind == HLoadString::LoadKind::kBssEntry) {
       if (!kUseReadBarrier || kUseBakerReadBarrier) {
-        // Rely on the pResolveString and/or marking to save everything, including temps.
-        // Note that IP may theoretically be clobbered by saving/restoring the live register
-        // (only one thanks to the custom calling convention), so we request a different temp.
+        // Rely on the pResolveString and marking to save everything we need, including temps.
+        // Note that IP may be clobbered by saving/restoring the live register (only one thanks
+        // to the custom calling convention) or by marking, so we request a different temp.
         locations->AddTemp(Location::RequiresRegister());
         RegisterSet caller_saves = RegisterSet::Empty();
         InvokeRuntimeCallingConventionARMVIXL calling_convention;
@@ -6059,7 +6106,9 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
     }
     case HLoadString::LoadKind::kBssEntry: {
       DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
-      vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
+      vixl32::Register temp = (!kUseReadBarrier || kUseBakerReadBarrier)
+          ? RegisterFrom(locations->GetTemp(0))
+          : out;
       CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
           codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
       codegen_->EmitMovwMovtPlaceholder(labels, temp);
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 137b554..48a82b8 100644
@@ -6057,6 +6057,7 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
         cls,
         Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
         Location::RegisterLocation(EAX));
+    DCHECK_EQ(calling_convention.GetRegisterAt(0), EAX);
     return;
   }
   DCHECK(!cls->NeedsAccessCheck());
@@ -6076,6 +6077,17 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
   locations->SetOut(Location::RequiresRegister());
+  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
+    if (!kUseReadBarrier || kUseBakerReadBarrier) {
+      // Rely on the type resolution and/or initialization to save everything.
+      RegisterSet caller_saves = RegisterSet::Empty();
+      InvokeRuntimeCallingConvention calling_convention;
+      caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
+      locations->SetCustomSlowPathCallerSaves(caller_saves);
+    } else {
+      // For non-Baker read barrier we have a temp-clobbering call.
+    }
+  }
 }
 
 Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
@@ -6158,7 +6170,7 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFE
       Label* fixup_label = codegen_->NewJitRootClassPatch(
           cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
       // /* GcRoot<mirror::Class> */ out = *address
-      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
+      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
       break;
     }
     case HLoadClass::LoadKind::kDexCacheViaMethod:
@@ -6250,7 +6262,7 @@ void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
     locations->SetOut(Location::RequiresRegister());
     if (load_kind == HLoadString::LoadKind::kBssEntry) {
       if (!kUseReadBarrier || kUseBakerReadBarrier) {
-        // Rely on the pResolveString and/or marking to save everything.
+        // Rely on the pResolveString to save everything.
         RegisterSet caller_saves = RegisterSet::Empty();
         InvokeRuntimeCallingConvention calling_convention;
         caller_saves.Add(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index c5367ce..c71f5e9 100644
@@ -245,9 +245,8 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
 
     SaveLiveRegisters(codegen, locations);
 
-    InvokeRuntimeCallingConvention calling_convention;
-    __ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
-            Immediate(cls_->GetTypeIndex().index_));
+    // Custom calling convention: RAX serves as both input and output.
+    __ movl(CpuRegister(RAX), Immediate(cls_->GetTypeIndex().index_));
     x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
                                   instruction_,
                                   dex_pc_,
@@ -5456,10 +5455,10 @@ HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
 void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
   HLoadClass::LoadKind load_kind = cls->GetLoadKind();
   if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
-    InvokeRuntimeCallingConvention calling_convention;
+    // Custom calling convention: RAX serves as both input and output.
     CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
         cls,
-        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
+        Location::RegisterLocation(RAX),
         Location::RegisterLocation(RAX));
     return;
   }
@@ -5478,6 +5477,17 @@ void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
     locations->SetInAt(0, Location::RequiresRegister());
   }
   locations->SetOut(Location::RequiresRegister());
+  if (load_kind == HLoadClass::LoadKind::kBssEntry) {
+    if (!kUseReadBarrier || kUseBakerReadBarrier) {
+      // Rely on the type resolution and/or initialization to save everything.
+      // Custom calling convention: RAX serves as both input and output.
+      RegisterSet caller_saves = RegisterSet::Empty();
+      caller_saves.Add(Location::RegisterLocation(RAX));
+      locations->SetCustomSlowPathCallerSaves(caller_saves);
+    } else {
+      // For non-Baker read barrier we have a temp-clobbering call.
+    }
+  }
 }
 
 Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
@@ -5553,7 +5563,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_S
       Label* fixup_label =
           codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
       // /* GcRoot<mirror::Class> */ out = *address
-      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
+      GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
       break;
     }
     default:
@@ -5629,7 +5639,7 @@ void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) {
     locations->SetOut(Location::RequiresRegister());
     if (load->GetLoadKind() == HLoadString::LoadKind::kBssEntry) {
       if (!kUseReadBarrier || kUseBakerReadBarrier) {
-        // Rely on the pResolveString and/or marking to save everything.
+        // Rely on the pResolveString to save everything.
         // Custom calling convention: RAX serves as both input and output.
         RegisterSet caller_saves = RegisterSet::Empty();
         caller_saves.Add(Location::RegisterLocation(RAX));
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index a443a40..cfe8406 100644
@@ -965,9 +965,27 @@ ENTRY \name
 END \name
 .endm
 
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+// Macro for string and type resolution and initialization.
+.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL name, entrypoint
+    .extern \entrypoint
+ENTRY \name
+    SETUP_SAVE_EVERYTHING_FRAME r1    @ save everything in case of GC
+    mov    r1, r9                     @ pass Thread::Current
+    bl     \entrypoint                @ (uint32_t index, Thread*)
+    cbz    r0, 1f                     @ If result is null, deliver the OOME.
+    .cfi_remember_state
+    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_R0
+    bx     lr
+    .cfi_restore_state
+1:
+    DELIVER_PENDING_EXCEPTION_FRAME_READY
+END \name
+.endm
+
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
 
     /*
      * Called by managed code to resolve a static field and load a non-wide value.
@@ -1066,27 +1084,6 @@ ENTRY art_quick_set64_static
     DELIVER_PENDING_EXCEPTION
 END art_quick_set64_static
 
-    /*
-     * Entry from managed code to resolve a string, this stub will
-     * check the dex cache for a matching string (the fast path), and if not found,
-     * it will allocate a String and deliver an exception on error.
-     * On success the String is returned. R0 holds the string index.
-     */
-
-ENTRY art_quick_resolve_string
-    SETUP_SAVE_EVERYTHING_FRAME r1                   @ save everything in case of GC
-    mov    r1, r9                                    @ pass Thread::Current
-    bl     artResolveStringFromCode                  @ (uint32_t type_idx, Thread*)
-    cbz    r0, 1f                                    @ If result is null, deliver the OOME.
-    .cfi_remember_state
-    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_R0
-    bx     lr
-    .cfi_restore_state
-1:
-    DELIVER_PENDING_EXCEPTION_FRAME_READY
-END art_quick_resolve_string
-
-
 // Generate the allocation entrypoints for each allocator.
 GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
 // Comment out allocators that have arm specific asm.
@@ -2057,7 +2054,9 @@ ENTRY \name
     beq .Lret_forwarding_address\name
 
 .Lslow_rb_\name:
-    // Save IP: the kSaveEverything entrypoint art_quick_resolve_string makes a tail call here.
+    // Save IP: The kSaveEverything entrypoint art_quick_resolve_string used to
+    // make a tail call here. Currently, it serves only for stack alignment but
+    // we may reintroduce kSaveEverything calls here in the future.
     push  {r0-r4, r9, ip, lr}           @ save return address, core caller-save registers and ip
     .cfi_adjust_cfa_offset 32
     .cfi_rel_offset r0, 0
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 219d8b4..bfbe481 100644
@@ -1553,6 +1553,24 @@ ENTRY \name
 END \name
 .endm
 
+// Macro for string and type resolution and initialization.
+.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL name, entrypoint
+    .extern \entrypoint
+ENTRY \name
+    SETUP_SAVE_EVERYTHING_FRAME       // save everything for stack crawl
+    mov   x1, xSELF                   // pass Thread::Current
+    bl    \entrypoint                 // (int32_t index, Thread* self)
+    cbz   w0, 1f                      // If result is null, deliver the OOME.
+    .cfi_remember_state
+    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
+    ret                        // return
+    .cfi_restore_state
+    .cfi_def_cfa_offset FRAME_SIZE_SAVE_EVERYTHING  // workaround for clang bug: 31975598
+1:
+    DELIVER_PENDING_EXCEPTION_FRAME_READY
+END \name
+.endm
+
 .macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
     cbz w0, 1f                 // result zero branch over
     ret                        // return
@@ -1571,10 +1589,11 @@ TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode,
      * initializer and deliver the exception on error. On success the static storage base is
      * returned.
      */
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
 
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
 
 ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
 ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_OR_DELIVER_PENDING_EXCEPTION_X1
@@ -1604,27 +1623,6 @@ THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCod
 THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCompiledCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
 THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_W0_IS_ZERO_OR_DELIVER
 
-    /*
-     * Entry from managed code to resolve a string, this stub will
-     * check the dex cache for a matching string (the fast path), and if not found,
-     * it will allocate a String and deliver an exception on error.
-     * On success the String is returned. R0 holds the string index.
-     */
-
-ENTRY art_quick_resolve_string
-    SETUP_SAVE_EVERYTHING_FRAME                     // save everything for stack crawl
-    mov   x1, xSELF                                 // pass Thread::Current
-    bl    artResolveStringFromCode                  // (int32_t string_idx, Thread* self)
-    cbz   w0, 1f                                    // If result is null, deliver the OOME.
-    .cfi_remember_state
-    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_X0
-    ret                        // return
-    .cfi_restore_state
-    .cfi_def_cfa_offset FRAME_SIZE_SAVE_EVERYTHING  // workaround for clang bug: 31975598
-1:
-    DELIVER_PENDING_EXCEPTION_FRAME_READY
-END art_quick_resolve_string
-
 // Generate the allocation entrypoints for each allocator.
 GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
 // Comment out allocators that have arm64 specific asm.
@@ -2380,13 +2378,6 @@ END art_quick_indexof
 ENTRY \name
     // Reference is null, no work to do at all.
     cbz \wreg, .Lret_rb_\name
-    /*
-     * Allocate 46 stack slots * 8 = 368 bytes:
-     * - 20 slots for core registers X0-X19
-     * - 24 slots for floating-point registers D0-D7 and D16-D31
-     * -  1 slot for return address register XLR
-     * -  1 padding slot for 16-byte stack alignment
-     */
     // Use wIP0 as temp and check the mark bit of the reference. wIP0 is not used by the compiler.
     ldr   wIP0, [\xreg, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
     tbz   wIP0, #LOCK_WORD_MARK_BIT_SHIFT, .Lnot_marked_rb_\name
@@ -2398,10 +2389,15 @@ ENTRY \name
     cmp wzr, wIP0, lsr #30
     beq .Lret_forwarding_address\name
 .Lslow_rb_\name:
-    // We must not clobber IP0 since art_quick_resolve_string makes a tail call here and relies on
-    // IP0 being restored.
+    /*
+     * Allocate 44 stack slots * 8 = 352 bytes:
+     * - 20 slots for core registers X0-X15, X17-X19, LR
+     * - 24 slots for floating-point registers D0-D7 and D16-D31
+     */
+    // We must not clobber IP1 since code emitted for HLoadClass and HLoadString
+    // relies on IP1 being preserved.
     // Save all potentially live caller-save core registers.
-    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 368
+    SAVE_TWO_REGS_INCREASE_FRAME x0, x1, 352
     SAVE_TWO_REGS  x2,  x3, 16
     SAVE_TWO_REGS  x4,  x5, 32
     SAVE_TWO_REGS  x6,  x7, 48
@@ -2409,8 +2405,8 @@ ENTRY \name
     SAVE_TWO_REGS x10, x11, 80
     SAVE_TWO_REGS x12, x13, 96
     SAVE_TWO_REGS x14, x15, 112
-    SAVE_TWO_REGS x16, x17, 128
-    SAVE_TWO_REGS x18, x19, 144
+    SAVE_TWO_REGS x17, x18, 128  // Skip x16, i.e. IP0.
+    SAVE_TWO_REGS x19, xLR, 144  // Save also return address.
     // Save all potentially live caller-save floating-point registers.
     stp   d0, d1,   [sp, #160]
     stp   d2, d3,   [sp, #176]
@@ -2424,9 +2420,6 @@ ENTRY \name
     stp   d26, d27, [sp, #304]
     stp   d28, d29, [sp, #320]
     stp   d30, d31, [sp, #336]
-    // Save return address.
-    // (sp + #352 is a padding slot)
-    SAVE_REG xLR, 360
 
     .ifnc \wreg, w0
       mov   w0, \wreg                   // Pass arg1 - obj from `wreg`
@@ -2446,8 +2439,8 @@ ENTRY \name
     POP_REGS_NE x10, x11, 80,  \xreg
     POP_REGS_NE x12, x13, 96,  \xreg
     POP_REGS_NE x14, x15, 112, \xreg
-    POP_REGS_NE x16, x17, 128, \xreg
-    POP_REGS_NE x18, x19, 144, \xreg
+    POP_REGS_NE x17, x18, 128, \xreg
+    POP_REGS_NE x19, xLR, 144, \xreg  // Restore also return address.
     // Restore floating-point registers.
     ldp   d0, d1,   [sp, #160]
     ldp   d2, d3,   [sp, #176]
@@ -2461,9 +2454,8 @@ ENTRY \name
     ldp   d26, d27, [sp, #304]
     ldp   d28, d29, [sp, #320]
     ldp   d30, d31, [sp, #336]
-    // Restore return address and remove padding.
-    RESTORE_REG xLR, 360
-    DECREASE_FRAME 368
+    // Remove frame and return.
+    DECREASE_FRAME 352
     ret
 .Lret_forwarding_address\name:
     mvn wIP0, wIP0
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index 76615e8..8c907e0 100644
@@ -922,6 +922,31 @@ MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
     END_FUNCTION VAR(c_name)
 END_MACRO
 
+// Macro for string and type resolution and initialization.
+MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name)
+    DEFINE_FUNCTION VAR(c_name)
+    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx              // save ref containing registers for GC
+    // Outgoing argument set up
+    subl MACRO_LITERAL(8), %esp                       // push padding
+    CFI_ADJUST_CFA_OFFSET(8)
+    pushl %fs:THREAD_SELF_OFFSET                      // pass Thread::Current()
+    CFI_ADJUST_CFA_OFFSET(4)
+    PUSH eax                                          // pass arg1
+    call CALLVAR(cxx_name)                            // cxx_name(arg1, Thread*)
+    addl MACRO_LITERAL(16), %esp                      // pop arguments
+    CFI_ADJUST_CFA_OFFSET(-16)
+    testl %eax, %eax                                  // If result is null, deliver the OOME.
+    jz 1f
+    CFI_REMEMBER_STATE
+    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX            // restore frame up to return address
+    ret                                               // return
+    CFI_RESTORE_STATE
+    CFI_DEF_CFA(esp, FRAME_SIZE_SAVE_EVERYTHING)      // workaround for clang bug: 31975598
+1:
+    DELIVER_PENDING_EXCEPTION_FRAME_READY
+    END_FUNCTION VAR(c_name)
+END_MACRO
+
 MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
     testl %eax, %eax               // eax == 0 ?
     jz  1f                         // if eax == 0 goto 1
@@ -1245,31 +1270,10 @@ GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFr
 GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
 GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64
 
-DEFINE_FUNCTION art_quick_resolve_string
-    SETUP_SAVE_EVERYTHING_FRAME ebx, ebx
-    // Outgoing argument set up
-    subl LITERAL(8), %esp                                 // push padding
-    CFI_ADJUST_CFA_OFFSET(8)
-    pushl %fs:THREAD_SELF_OFFSET                          // pass Thread::Current()
-    CFI_ADJUST_CFA_OFFSET(4)
-    PUSH eax                                              // pass arg1
-    call SYMBOL(artResolveStringFromCode)
-    addl LITERAL(16), %esp                                // pop arguments
-    CFI_ADJUST_CFA_OFFSET(-16)
-    testl %eax, %eax                                      // If result is null, deliver the OOME.
-    jz 1f
-    CFI_REMEMBER_STATE
-    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_EAX
-    ret
-    CFI_RESTORE_STATE
-    CFI_DEF_CFA(esp, FRAME_SIZE_SAVE_EVERYTHING)  // workaround for clang bug: 31975598
-1:
-    DELIVER_PENDING_EXCEPTION_FRAME_READY
-END_FUNCTION art_quick_resolve_string
-
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
 
 TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
 
index a1ae858..f1be52e 100644 (file)
@@ -950,6 +950,26 @@ MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
     END_FUNCTION VAR(c_name)
 END_MACRO
 
+// Macro for string and type resolution and initialization.
+MACRO2(ONE_ARG_SAVE_EVERYTHING_DOWNCALL, c_name, cxx_name)
+    DEFINE_FUNCTION VAR(c_name)
+    SETUP_SAVE_EVERYTHING_FRAME                   // save everything for GC
+    // Outgoing argument set up
+    movl %eax, %edi                               // pass arg0 (string or type index)
+    movq %gs:THREAD_SELF_OFFSET, %rsi             // pass Thread::Current()
+    call CALLVAR(cxx_name)                        // cxx_name(arg0, Thread*)
+    testl %eax, %eax                              // If result is null, deliver the OOME.
+    jz 1f
+    CFI_REMEMBER_STATE
+    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_RAX        // restore frame up to return address
+    ret
+    CFI_RESTORE_STATE
+    CFI_DEF_CFA(rsp, FRAME_SIZE_SAVE_EVERYTHING)  // workaround for clang bug: 31975598
+1:
+    DELIVER_PENDING_EXCEPTION_FRAME_READY
+    END_FUNCTION VAR(c_name)
+END_MACRO
+
 MACRO0(RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER)
     testq %rax, %rax               // rax == 0 ?
     jz  1f                         // if rax == 0 goto 1
@@ -1270,27 +1290,10 @@ DEFINE_FUNCTION art_quick_alloc_object_initialized_region_tlab
     ALLOC_OBJECT_TLAB_SLOW_PATH artAllocObjectFromCodeInitializedRegionTLAB
 END_FUNCTION art_quick_alloc_object_initialized_region_tlab
 
-DEFINE_FUNCTION art_quick_resolve_string
-    SETUP_SAVE_EVERYTHING_FRAME
-    // Outgoing argument set up
-    movl %eax, %edi                             // pass string index
-    movq %gs:THREAD_SELF_OFFSET, %rsi           // pass Thread::Current()
-    call SYMBOL(artResolveStringFromCode)       // artResolveStringFromCode(arg0, Thread*)
-
-    testl %eax, %eax                            // If result is null, deliver the OOME.
-    jz 1f
-    CFI_REMEMBER_STATE
-    RESTORE_SAVE_EVERYTHING_FRAME_KEEP_RAX      // restore frame up to return address
-    ret
-    CFI_RESTORE_STATE
-    CFI_DEF_CFA(rsp, FRAME_SIZE_SAVE_EVERYTHING)  // workaround for clang bug: 31975598
-1:
-    DELIVER_PENDING_EXCEPTION_FRAME_READY
-END_FUNCTION art_quick_resolve_string
-
-ONE_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
-ONE_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode
+ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode
 
 TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO
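
(For reference: the four stubs generated above call the C++ entrypoints defined in quick_dexcache_entrypoints.cc, whose diff follows. Abbreviated declarations, with stub forward declarations added so the sketch stands alone:)

    #include <cstdint>

    // Stub forward declarations for illustration; the real classes live in the ART runtime.
    namespace art {
    class Thread;
    namespace mirror { class Class; class String; }

    extern "C" mirror::Class* artInitializeStaticStorageFromCode(uint32_t type_idx, Thread* self);
    extern "C" mirror::Class* artInitializeTypeFromCode(uint32_t type_idx, Thread* self);
    extern "C" mirror::Class* artInitializeTypeAndVerifyAccessFromCode(uint32_t type_idx,
                                                                       Thread* self);
    extern "C" mirror::String* artResolveStringFromCode(int32_t string_idx, Thread* self);
    }  // namespace art
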
 
index 5b1b287..699cf91 100644 (file)
@@ -53,13 +53,18 @@ static inline void BssWriteBarrier(ArtMethod* outer_method) REQUIRES_SHARED(Lock
   }
 }
 
+constexpr Runtime::CalleeSaveType kInitEntrypointSaveType =
+    // TODO: Change the resolution and initialization entrypoints on MIPS and MIPS64 to
+    // kSaveEverything.
+    (kRuntimeISA == kMips || kRuntimeISA == kMips64) ? Runtime::kSaveRefsOnly
+                                                     : Runtime::kSaveEverything;
+
 extern "C" mirror::Class* artInitializeStaticStorageFromCode(uint32_t type_idx, Thread* self)
     REQUIRES_SHARED(Locks::mutator_lock_) {
  // Called to ensure the static storage base is initialized for direct static field reads and
  // writes. A class may access another class' fields without declared access, since access can
  // be granted through inheritance.
   ScopedQuickEntrypointChecks sqec(self);
-  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, Runtime::kSaveRefsOnly);
+  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
   ArtMethod* caller = caller_and_outer.caller;
   mirror::Class* result =
       ResolveVerifyAndClinit(dex::TypeIndex(type_idx), caller, self, true, false);
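
(Sketch, not part of the change: kRuntimeISA is a per-target compile-time constant, so the ternary in kInitEntrypointSaveType above folds to a single enumerator at compile time. A standalone illustration with renamed stand-in enums, assuming an arm64 target:)

    // Renamed stand-ins; not the real art::Runtime definitions.
    enum class CalleeSaveType { kSaveRefsOnly, kSaveEverything };
    enum class Isa { kArm, kArm64, kMips, kMips64, kX86, kX86_64 };

    constexpr Isa kIsa = Isa::kArm64;  // assumed target for this sketch
    constexpr CalleeSaveType kInitSaveType =
        (kIsa == Isa::kMips || kIsa == Isa::kMips64) ? CalleeSaveType::kSaveRefsOnly
                                                     : CalleeSaveType::kSaveEverything;
    static_assert(kInitSaveType == CalleeSaveType::kSaveEverything,
                  "non-MIPS targets now use the save-everything frame");
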
@@ -73,7 +78,7 @@ extern "C" mirror::Class* artInitializeTypeFromCode(uint32_t type_idx, Thread* s
     REQUIRES_SHARED(Locks::mutator_lock_) {
   // Called when method->dex_cache_resolved_types_[] misses.
   ScopedQuickEntrypointChecks sqec(self);
-  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, Runtime::kSaveRefsOnly);
+  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
   ArtMethod* caller = caller_and_outer.caller;
   mirror::Class* result =
       ResolveVerifyAndClinit(dex::TypeIndex(type_idx), caller, self, false, false);
@@ -88,7 +93,7 @@ extern "C" mirror::Class* artInitializeTypeAndVerifyAccessFromCode(uint32_t type
   // Called when caller isn't guaranteed to have access to a type and the dex cache may be
   // unpopulated.
   ScopedQuickEntrypointChecks sqec(self);
-  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, Runtime::kSaveRefsOnly);
+  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
   ArtMethod* caller = caller_and_outer.caller;
   mirror::Class* result =
       ResolveVerifyAndClinit(dex::TypeIndex(type_idx), caller, self, false, true);
@@ -101,11 +106,7 @@ extern "C" mirror::Class* artInitializeTypeAndVerifyAccessFromCode(uint32_t type
 extern "C" mirror::String* artResolveStringFromCode(int32_t string_idx, Thread* self)
     REQUIRES_SHARED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
-  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(
-      self,
-      // TODO: Change art_quick_resolve_string on MIPS and MIPS64 to kSaveEverything.
-      (kRuntimeISA == kMips || kRuntimeISA == kMips64) ? Runtime::kSaveRefsOnly
-                                                       : Runtime::kSaveEverything);
+  auto caller_and_outer = GetCalleeSaveMethodCallerAndOuterMethod(self, kInitEntrypointSaveType);
   ArtMethod* caller = caller_and_outer.caller;
   mirror::String* result = ResolveStringFromCode(caller, dex::StringIndex(string_idx));
   if (LIKELY(result != nullptr)) {