/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #include "optimizing_compiler.h"
23 #include "code_generator.h"
25 #include "driver/compiler_driver.h"
26 #include "driver/dex_compilation_unit.h"
27 #include "graph_visualizer.h"
29 #include "register_allocator.h"
30 #include "ssa_phi_elimination.h"
31 #include "ssa_liveness_analysis.h"
32 #include "utils/arena_allocator.h"
37 * Used by the code generator, to allocate the code in a vector.
39 class CodeVectorAllocator FINAL : public CodeAllocator {
41 CodeVectorAllocator() { }
43 virtual uint8_t* Allocate(size_t size) {
49 size_t GetSize() const { return size_; }
50 const std::vector<uint8_t>& GetMemory() const { return memory_; }
53 std::vector<uint8_t> memory_;
56 DISALLOW_COPY_AND_ASSIGN(CodeVectorAllocator);
/**
 * If set to true, generates a file suitable for the c1visualizer tool and IRHydra.
 */
static bool kIsVisualizerEnabled = false;

/**
 * Filter to apply to the visualizer. Methods whose name contain that filter will
 * be in the file.  The empty string matches every method.
 */
static const char* kStringFilter = "";
70 class OptimizingCompiler FINAL : public Compiler {
72 explicit OptimizingCompiler(CompilerDriver* driver);
74 bool CanCompileMethod(uint32_t method_idx, const DexFile& dex_file, CompilationUnit* cu) const
77 CompiledMethod* Compile(const DexFile::CodeItem* code_item,
78 uint32_t access_flags,
79 InvokeType invoke_type,
80 uint16_t class_def_idx,
83 const DexFile& dex_file) const OVERRIDE;
85 CompiledMethod* TryCompile(const DexFile::CodeItem* code_item,
86 uint32_t access_flags,
87 InvokeType invoke_type,
88 uint16_t class_def_idx,
91 const DexFile& dex_file) const;
93 // For the following methods we will use the fallback. This is a delegation pattern.
94 CompiledMethod* JniCompile(uint32_t access_flags,
96 const DexFile& dex_file) const OVERRIDE;
98 uintptr_t GetEntryPointOf(mirror::ArtMethod* method) const OVERRIDE
99 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
101 bool WriteElf(art::File* file,
102 OatWriter* oat_writer,
103 const std::vector<const art::DexFile*>& dex_files,
104 const std::string& android_root,
105 bool is_host) const OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
107 Backend* GetCodeGenerator(CompilationUnit* cu, void* compilation_unit) const OVERRIDE;
109 void InitCompilationUnit(CompilationUnit& cu) const OVERRIDE;
111 void Init() const OVERRIDE;
113 void UnInit() const OVERRIDE;
116 std::unique_ptr<std::ostream> visualizer_output_;
118 // Delegate to another compiler in case the optimizing compiler cannot compile a method.
119 // Currently the fallback is the quick compiler.
120 std::unique_ptr<Compiler> delegate_;
122 DISALLOW_COPY_AND_ASSIGN(OptimizingCompiler);
125 OptimizingCompiler::OptimizingCompiler(CompilerDriver* driver) : Compiler(driver, 100),
126 delegate_(Create(driver, Compiler::Kind::kQuick)) {
127 if (kIsVisualizerEnabled) {
128 visualizer_output_.reset(new std::ofstream("art.cfg"));
132 void OptimizingCompiler::Init() const {
136 void OptimizingCompiler::UnInit() const {
140 bool OptimizingCompiler::CanCompileMethod(uint32_t method_idx, const DexFile& dex_file,
141 CompilationUnit* cu) const {
142 return delegate_->CanCompileMethod(method_idx, dex_file, cu);
145 CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
147 const DexFile& dex_file) const {
148 return delegate_->JniCompile(access_flags, method_idx, dex_file);
151 uintptr_t OptimizingCompiler::GetEntryPointOf(mirror::ArtMethod* method) const {
152 return delegate_->GetEntryPointOf(method);
155 bool OptimizingCompiler::WriteElf(art::File* file, OatWriter* oat_writer,
156 const std::vector<const art::DexFile*>& dex_files,
157 const std::string& android_root, bool is_host) const {
158 return delegate_->WriteElf(file, oat_writer, dex_files, android_root, is_host);
161 Backend* OptimizingCompiler::GetCodeGenerator(CompilationUnit* cu, void* compilation_unit) const {
162 return delegate_->GetCodeGenerator(cu, compilation_unit);
165 void OptimizingCompiler::InitCompilationUnit(CompilationUnit& cu) const {
166 delegate_->InitCompilationUnit(cu);
169 CompiledMethod* OptimizingCompiler::TryCompile(const DexFile::CodeItem* code_item,
170 uint32_t access_flags,
171 InvokeType invoke_type,
172 uint16_t class_def_idx,
174 jobject class_loader,
175 const DexFile& dex_file) const {
176 InstructionSet instruction_set = GetCompilerDriver()->GetInstructionSet();
177 // Always use the thumb2 assembler: some runtime functionality (like implicit stack
178 // overflow checks) assume thumb2.
179 if (instruction_set == kArm) {
180 instruction_set = kThumb2;
183 // Do not attempt to compile on architectures we do not support.
184 if (instruction_set != kX86 && instruction_set != kX86_64 && instruction_set != kThumb2) {
188 DexCompilationUnit dex_compilation_unit(
189 nullptr, class_loader, art::Runtime::Current()->GetClassLinker(), dex_file, code_item,
190 class_def_idx, method_idx, access_flags,
191 GetCompilerDriver()->GetVerifiedMethod(&dex_file, method_idx));
193 // For testing purposes, we put a special marker on method names that should be compiled
194 // with this compiler. This makes sure we're not regressing.
195 bool shouldCompile = dex_compilation_unit.GetSymbol().find("00024opt_00024") != std::string::npos;
196 bool shouldOptimize =
197 dex_compilation_unit.GetSymbol().find("00024reg_00024") != std::string::npos;
200 ArenaAllocator arena(&pool);
201 HGraphBuilder builder(&arena, &dex_compilation_unit, &dex_file, GetCompilerDriver());
203 HGraph* graph = builder.BuildGraph(*code_item);
204 if (graph == nullptr) {
206 LOG(FATAL) << "Could not build graph in optimizing compiler";
211 CodeGenerator* codegen = CodeGenerator::Create(&arena, graph, instruction_set);
212 if (codegen == nullptr) {
214 LOG(FATAL) << "Could not find code generator for optimizing compiler";
219 HGraphVisualizer visualizer(
220 visualizer_output_.get(), graph, kStringFilter, *codegen, dex_compilation_unit);
221 visualizer.DumpGraph("builder");
223 CodeVectorAllocator allocator;
225 if (RegisterAllocator::CanAllocateRegistersFor(*graph, instruction_set)) {
226 graph->BuildDominatorTree();
227 graph->TransformToSSA();
228 visualizer.DumpGraph("ssa");
229 graph->FindNaturalLoops();
231 SsaRedundantPhiElimination(graph).Run();
232 SsaDeadPhiElimination(graph).Run();
234 SsaLivenessAnalysis liveness(*graph, codegen);
236 visualizer.DumpGraph(kLivenessPassName);
238 RegisterAllocator register_allocator(graph->GetArena(), codegen, liveness);
239 register_allocator.AllocateRegisters();
241 visualizer.DumpGraph(kRegisterAllocatorPassName);
242 codegen->CompileOptimized(&allocator);
243 } else if (shouldOptimize && RegisterAllocator::Supports(instruction_set)) {
244 LOG(FATAL) << "Could not allocate registers in optimizing compiler";
246 codegen->CompileBaseline(&allocator);
248 // Run these phases to get some test coverage.
249 graph->BuildDominatorTree();
250 graph->TransformToSSA();
251 visualizer.DumpGraph("ssa");
252 graph->FindNaturalLoops();
253 SsaLivenessAnalysis liveness(*graph, codegen);
255 visualizer.DumpGraph(kLivenessPassName);
258 std::vector<uint8_t> mapping_table;
259 SrcMap src_mapping_table;
260 codegen->BuildMappingTable(&mapping_table,
261 GetCompilerDriver()->GetCompilerOptions().GetIncludeDebugSymbols() ?
262 &src_mapping_table : nullptr);
263 std::vector<uint8_t> vmap_table;
264 codegen->BuildVMapTable(&vmap_table);
265 std::vector<uint8_t> gc_map;
266 codegen->BuildNativeGCMap(&gc_map, dex_compilation_unit);
268 return new CompiledMethod(GetCompilerDriver(),
270 allocator.GetMemory(),
271 codegen->GetFrameSize(),
272 codegen->GetCoreSpillMask(),
273 0, /* FPR spill mask, unused */
281 CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
282 uint32_t access_flags,
283 InvokeType invoke_type,
284 uint16_t class_def_idx,
286 jobject class_loader,
287 const DexFile& dex_file) const {
288 CompiledMethod* method = TryCompile(code_item, access_flags, invoke_type, class_def_idx,
289 method_idx, class_loader, dex_file);
290 if (method != nullptr) {
294 return delegate_->Compile(code_item, access_flags, invoke_type, class_def_idx, method_idx,
295 class_loader, dex_file);
// Factory entry point: heap-allocates an OptimizingCompiler for `driver`.
// The caller takes ownership of the returned Compiler.
Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
  return new OptimizingCompiler(driver);