OSDN Git Service

ART: Enable more Clang warnings
author: Andreas Gampe <agampe@google.com>
Mon, 6 Apr 2015 16:35:22 +0000 (09:35 -0700)
committer: Andreas Gampe <agampe@google.com>
Mon, 6 Apr 2015 21:05:07 +0000 (14:05 -0700)
Change-Id: Ie6aba02f4223b1de02530e1515c63505f37e184c

27 files changed:
build/Android.common_build.mk
compiler/dex/gvn_dead_code_elimination.cc
compiler/dex/quick/arm64/utility_arm64.cc
compiler/dex/quick/mips/utility_mips.cc
compiler/linker/relative_patcher.cc
compiler/optimizing/bounds_check_elimination.cc
compiler/optimizing/code_generator.cc
compiler/optimizing/code_generator_arm.cc
compiler/optimizing/code_generator_x86.cc
compiler/optimizing/code_generator_x86_64.cc
compiler/optimizing/intrinsics.cc
compiler/utils/arm/assembler_arm.cc
compiler/utils/arm/assembler_thumb2.cc
compiler/utils/arm64/assembler_arm64.h
disassembler/disassembler_mips.cc
disassembler/disassembler_mips64.cc
runtime/base/macros.h
runtime/entrypoints/quick/quick_deoptimization_entrypoints.cc
runtime/entrypoints/quick/quick_throw_entrypoints.cc
runtime/gc/collector/concurrent_copying.cc
runtime/gc/collector/concurrent_copying.h
runtime/gc/heap.cc
runtime/gc/task_processor.cc
runtime/mirror/object.h
runtime/parsed_options.cc
runtime/quick_exception_handler.h
runtime/verifier/method_verifier.cc

index 8b4be82..8eeeec6 100644 (file)
@@ -141,6 +141,9 @@ art_clang_cflags += -Wused-but-marked-unused
 # Enable warning for deprecated language features.
 art_clang_cflags += -Wdeprecated
 
+# Enable warning for unreachable break & return, and missing NO_RETURN annotations.
+art_clang_cflags += -Wunreachable-code-break -Wunreachable-code-return -Wmissing-noreturn
+
 
 # GCC-only warnings.
 art_gcc_cflags := -Wunused-but-set-parameter
index 2d4c18f..ec12221 100644 (file)
@@ -1357,7 +1357,6 @@ bool GvnDeadCodeElimination::RecordMIR(MIR* mir) {
     default:
       LOG(FATAL) << "Unexpected opcode: " << opcode;
       UNREACHABLE();
-      break;
   }
 
   if (mir->ssa_rep->num_defs != 0) {
index f48290d..e9ad8ba 100644 (file)
@@ -589,13 +589,11 @@ LIR* Arm64Mir2Lir::OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r
       DCHECK_EQ(shift, 0);
       // Binary, but rm is encoded twice.
       return NewLIR2(kA64Rev2rr | wide, r_dest_src1.GetReg(), r_src2.GetReg());
-      break;
     case kOpRevsh:
       // Binary, but rm is encoded twice.
       NewLIR2(kA64Rev162rr | wide, r_dest_src1.GetReg(), r_src2.GetReg());
       // "sxth r1, r2" is "sbfm r1, r2, #0, #15"
       return NewLIR4(kA64Sbfm4rrdd | wide, r_dest_src1.GetReg(), r_dest_src1.GetReg(), 0, 15);
-      break;
     case kOp2Byte:
       DCHECK_EQ(shift, ENCODE_NO_SHIFT);
       // "sbfx r1, r2, #imm1, #imm2" is "sbfm r1, r2, #imm1, #(imm1 + imm2 - 1)".
@@ -645,10 +643,9 @@ LIR* Arm64Mir2Lir::OpRegRegExtend(OpKind op, RegStorage r_dest_src1, RegStorage
       // Note: intentional fallthrough
     case kOpSub:
       return OpRegRegRegExtend(op, r_dest_src1, r_dest_src1, r_src2, ext, amount);
-      break;
     default:
       LOG(FATAL) << "Bad Opcode: " << opcode;
-      break;
+      UNREACHABLE();
   }
 
   DCHECK(!IsPseudoLirOp(opcode));
index bf0e0fc..8ab5422 100644 (file)
@@ -283,9 +283,9 @@ LIR* MipsMir2Lir::OpReg(OpKind op, RegStorage r_dest_src) {
       break;
     case kOpBx:
       return NewLIR2(kMipsJalr, rZERO, r_dest_src.GetReg());
-      break;
     default:
       LOG(FATAL) << "Bad case in OpReg";
+      UNREACHABLE();
   }
   return NewLIR2(opcode, cu_->target64 ? rRAd : rRA, r_dest_src.GetReg());
 }
@@ -295,8 +295,8 @@ LIR* MipsMir2Lir::OpRegImm(OpKind op, RegStorage r_dest_src1, int value) {
     return OpRegRegImm(op, r_dest_src1, r_dest_src1, value);
   } else {
     LOG(FATAL) << "Bad case in OpRegImm";
+    UNREACHABLE();
   }
-  UNREACHABLE();
 }
 
 LIR* MipsMir2Lir::OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2) {
index 8ee87aa..89aed95 100644 (file)
@@ -67,22 +67,17 @@ std::unique_ptr<RelativePatcher> RelativePatcher::Create(
   switch (instruction_set) {
     case kX86:
       return std::unique_ptr<RelativePatcher>(new X86RelativePatcher());
-      break;
     case kX86_64:
       return std::unique_ptr<RelativePatcher>(new X86_64RelativePatcher());
-      break;
     case kArm:
       // Fall through: we generate Thumb2 code for "arm".
     case kThumb2:
       return std::unique_ptr<RelativePatcher>(new Thumb2RelativePatcher(provider));
-      break;
     case kArm64:
       return std::unique_ptr<RelativePatcher>(
           new Arm64RelativePatcher(provider, features->AsArm64InstructionSetFeatures()));
-      break;
     default:
       return std::unique_ptr<RelativePatcher>(new RelativePatcherNone);
-      break;
   }
 }
 
index dce02f7..6511120 100644 (file)
@@ -239,7 +239,6 @@ class ValueBound : public ValueObject {
       *underflow = true;
       return Min();
     }
-    return ValueBound(instruction_, new_constant);
   }
 
  private:
index 9b1ef17..da28dc7 100644 (file)
@@ -132,7 +132,6 @@ size_t CodeGenerator::FindFreeEntry(bool* array, size_t length) {
   }
   LOG(FATAL) << "Could not find a register in baseline register allocator";
   UNREACHABLE();
-  return -1;
 }
 
 size_t CodeGenerator::FindTwoFreeConsecutiveAlignedEntries(bool* array, size_t length) {
@@ -145,7 +144,6 @@ size_t CodeGenerator::FindTwoFreeConsecutiveAlignedEntries(bool* array, size_t l
   }
   LOG(FATAL) << "Could not find a register in baseline register allocator";
   UNREACHABLE();
-  return -1;
 }
 
 void CodeGenerator::InitializeCodeGeneration(size_t number_of_spill_slots,
index f5e4df1..cfc798a 100644 (file)
@@ -562,7 +562,6 @@ Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
     case Primitive::kPrimLong:
     case Primitive::kPrimDouble:
       return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
-      break;
 
     case Primitive::kPrimInt:
     case Primitive::kPrimNot:
@@ -575,10 +574,11 @@ Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
     case Primitive::kPrimShort:
     case Primitive::kPrimVoid:
       LOG(FATAL) << "Unexpected type " << load->GetType();
+      UNREACHABLE();
   }
 
   LOG(FATAL) << "Unreachable";
-  return Location();
+  UNREACHABLE();
 }
 
 Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
@@ -683,7 +683,6 @@ Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type ty
       return Location();
   }
   UNREACHABLE();
-  return Location();
 }
 
 void CodeGeneratorARM::Move32(Location destination, Location source) {
index f79dbc3..92b62e2 100644 (file)
@@ -514,7 +514,6 @@ Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
     case Primitive::kPrimLong:
     case Primitive::kPrimDouble:
       return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
-      break;
 
     case Primitive::kPrimInt:
     case Primitive::kPrimNot:
@@ -527,10 +526,11 @@ Location CodeGeneratorX86::GetStackLocation(HLoadLocal* load) const {
     case Primitive::kPrimShort:
     case Primitive::kPrimVoid:
       LOG(FATAL) << "Unexpected type " << load->GetType();
+      UNREACHABLE();
   }
 
   LOG(FATAL) << "Unreachable";
-  return Location();
+  UNREACHABLE();
 }
 
 Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
index 9958451..cdbc778 100644 (file)
@@ -555,7 +555,6 @@ Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
     case Primitive::kPrimLong:
     case Primitive::kPrimDouble:
       return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
-      break;
 
     case Primitive::kPrimInt:
     case Primitive::kPrimNot:
@@ -568,10 +567,11 @@ Location CodeGeneratorX86_64::GetStackLocation(HLoadLocal* load) const {
     case Primitive::kPrimShort:
     case Primitive::kPrimVoid:
       LOG(FATAL) << "Unexpected type " << load->GetType();
+      UNREACHABLE();
   }
 
   LOG(FATAL) << "Unreachable";
-  return Location();
+  UNREACHABLE();
 }
 
 void CodeGeneratorX86_64::Move(Location destination, Location source) {
index 628a844..20aa45f 100644 (file)
@@ -90,7 +90,6 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
           LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
           UNREACHABLE();
       }
-      break;
     case kIntrinsicReverseBytes:
       switch (GetType(method.d.data, true)) {
         case Primitive::kPrimShort:
@@ -103,7 +102,6 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
           LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
           UNREACHABLE();
       }
-      break;
 
     // Abs.
     case kIntrinsicAbsDouble:
@@ -166,7 +164,6 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
           LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
           UNREACHABLE();
       }
-      break;
 
     // Memory.poke.
     case kIntrinsicPoke:
@@ -183,7 +180,6 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
           LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
           UNREACHABLE();
       }
-      break;
 
     // String.
     case kIntrinsicCharAt:
@@ -211,7 +207,6 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
           LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
           UNREACHABLE();
       }
-      break;
     case kIntrinsicUnsafeGet: {
       const bool is_volatile = (method.d.data & kIntrinsicFlagIsVolatile);
       switch (GetType(method.d.data, false)) {
@@ -225,7 +220,6 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
           LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
           UNREACHABLE();
       }
-      break;
     }
     case kIntrinsicUnsafePut: {
       enum Sync { kNoSync, kVolatile, kOrdered };
index a02191b..8059289 100644 (file)
@@ -89,7 +89,6 @@ uint32_t ShifterOperand::encodingArm() const {
       } else {
         return immed_;
       }
-      break;
     case kRegister:
       if (is_shift_) {
         uint32_t shift_type;
@@ -121,7 +120,6 @@ uint32_t ShifterOperand::encodingArm() const {
         // Simple register
         return static_cast<uint32_t>(rm_);
       }
-      break;
     default:
       // Can't get here.
       LOG(FATAL) << "Invalid shifter operand for ARM";
@@ -156,13 +154,11 @@ uint32_t ShifterOperand::encodingThumb() const {
         // Simple register
         return static_cast<uint32_t>(rm_);
       }
-      break;
     default:
       // Can't get here.
       LOG(FATAL) << "Invalid shifter operand for thumb";
-      return 0;
+      UNREACHABLE();
   }
-  return 0;
 }
 
 uint32_t Address::encodingArm() const {
index 3e8d9c3..6286b10 100644 (file)
@@ -749,7 +749,6 @@ bool Thumb2Assembler::Is32BitDataProcessing(Condition cond ATTRIBUTE_UNUSED,
       break;
     case TEQ:
       return true;
-      break;
     case ADD:
     case SUB:
       break;
index 2031fe4..8973b9c 100644 (file)
@@ -30,7 +30,9 @@
 
 // TODO: make vixl clean wrt -Wshadow.
 #pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunknown-pragmas"
 #pragma GCC diagnostic ignored "-Wshadow"
+#pragma GCC diagnostic ignored "-Wmissing-noreturn"
 #include "vixl/a64/macro-assembler-a64.h"
 #include "vixl/a64/disasm-a64.h"
 #pragma GCC diagnostic pop
index b27b555..e2b7341 100644 (file)
@@ -228,7 +228,6 @@ size_t DisassemblerMips::Dump(std::ostream& os, const uint8_t* instr_ptr) {
               }
               continue;  // No ", ".
             }
-            break;
           case 'I':  // Upper 16-bit immediate.
             args << reinterpret_cast<void*>((instruction & 0xffff) << 16);
             break;
index 7b289d0..1b6e6be 100644 (file)
@@ -233,7 +233,6 @@ static void DumpMips64(std::ostream& os, const uint8_t* instr_ptr) {
               }
               continue;  // No ", ".
             }
-            break;
           case 'I':  // Upper 16-bit immediate.
             args << reinterpret_cast<void*>((instruction & 0xffff) << 16);
             break;
index 3a9de5f..6c33232 100644 (file)
@@ -66,7 +66,7 @@ friend class test_set_name##_##individual_test##_Test
 // A macro to disallow new and delete operators for a class. It goes in the private: declarations.
 #define DISALLOW_ALLOCATION() \
   public: \
-    ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
+    NO_RETURN ALWAYS_INLINE void operator delete(void*, size_t) { UNREACHABLE(); } \
   private: \
     void* operator new(size_t) = delete
 
index d88d262..6a8aaf2 100644 (file)
@@ -27,7 +27,7 @@
 
 namespace art {
 
-extern "C" void artDeoptimize(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+extern "C" NO_RETURN void artDeoptimize(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   self->SetException(Thread::GetDeoptimizationException());
   self->QuickDeliverException();
index 70317bb..9644b98 100644 (file)
 namespace art {
 
 // Deliver an exception that's pending on thread helping set up a callee save frame on the way.
-extern "C" void artDeliverPendingExceptionFromCode(Thread* self)
+extern "C" NO_RETURN void artDeliverPendingExceptionFromCode(Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   self->QuickDeliverException();
 }
 
 // Called by generated call to throw an exception.
-extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread* self)
+extern "C" NO_RETURN void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   /*
    * exception may be NULL, in which case this routine should
@@ -50,7 +50,7 @@ extern "C" void artDeliverExceptionFromCode(mirror::Throwable* exception, Thread
 }
 
 // Called by generated call to throw a NPE exception.
-extern "C" void artThrowNullPointerExceptionFromCode(Thread* self)
+extern "C" NO_RETURN void artThrowNullPointerExceptionFromCode(Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   self->NoteSignalBeingHandled();
@@ -60,7 +60,7 @@ extern "C" void artThrowNullPointerExceptionFromCode(Thread* self)
 }
 
 // Called by generated call to throw an arithmetic divide by zero exception.
-extern "C" void artThrowDivZeroFromCode(Thread* self)
+extern "C" NO_RETURN void artThrowDivZeroFromCode(Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   ThrowArithmeticExceptionDivideByZero();
@@ -68,14 +68,14 @@ extern "C" void artThrowDivZeroFromCode(Thread* self)
 }
 
 // Called by generated call to throw an array index out of bounds exception.
-extern "C" void artThrowArrayBoundsFromCode(int index, int length, Thread* self)
+extern "C" NO_RETURN void artThrowArrayBoundsFromCode(int index, int length, Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   ThrowArrayIndexOutOfBoundsException(index, length);
   self->QuickDeliverException();
 }
 
-extern "C" void artThrowStackOverflowFromCode(Thread* self)
+extern "C" NO_RETURN void artThrowStackOverflowFromCode(Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   self->NoteSignalBeingHandled();
@@ -84,15 +84,16 @@ extern "C" void artThrowStackOverflowFromCode(Thread* self)
   self->QuickDeliverException();
 }
 
-extern "C" void artThrowNoSuchMethodFromCode(int32_t method_idx, Thread* self)
+extern "C" NO_RETURN void artThrowNoSuchMethodFromCode(int32_t method_idx, Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   ThrowNoSuchMethodError(method_idx);
   self->QuickDeliverException();
 }
 
-extern "C" void artThrowClassCastException(mirror::Class* dest_type, mirror::Class* src_type,
-                                           Thread* self)
+extern "C" NO_RETURN void artThrowClassCastException(mirror::Class* dest_type,
+                                                     mirror::Class* src_type,
+                                                     Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   DCHECK(!dest_type->IsAssignableFrom(src_type));
@@ -100,8 +101,8 @@ extern "C" void artThrowClassCastException(mirror::Class* dest_type, mirror::Cla
   self->QuickDeliverException();
 }
 
-extern "C" void artThrowArrayStoreException(mirror::Object* array, mirror::Object* value,
-                                            Thread* self)
+extern "C" NO_RETURN void artThrowArrayStoreException(mirror::Object* array, mirror::Object* value,
+                                                      Thread* self)
     SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
   ScopedQuickEntrypointChecks sqec(self);
   ThrowArrayStoreException(value->GetClass(), array->GetClass());
index 56919bd..8aa1b52 100644 (file)
@@ -804,6 +804,9 @@ class ConcurrentCopyingClearBlackPtrsVisitor {
  public:
   explicit ConcurrentCopyingClearBlackPtrsVisitor(ConcurrentCopying* cc)
       : collector_(cc) {}
+#ifndef USE_BAKER_OR_BROOKS_READ_BARRIER
+  NO_RETURN
+#endif
   void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
     DCHECK(obj != nullptr);
index a87053d..93de035 100644 (file)
@@ -232,7 +232,7 @@ class ConcurrentCopying : public GarbageCollector {
   bool IsOnAllocStack(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   mirror::Object* GetFwdPtr(mirror::Object* from_ref)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-  void FlipThreadRoots() LOCKS_EXCLUDED(Locks::mutator_lock_);;
+  void FlipThreadRoots() LOCKS_EXCLUDED(Locks::mutator_lock_);
   void SwapStacks(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   void RecordLiveStackFreezeSize(Thread* self);
   void ComputeUnevacFromSpaceLiveRatio();
index d80bba6..b9153c1 100644 (file)
@@ -504,7 +504,6 @@ MemMap* Heap::MapAnonymousPreferredAddress(const char* name, uint8_t* request_be
     // Retry a  second time with no specified request begin.
     request_begin = nullptr;
   }
-  return nullptr;
 }
 
 bool Heap::MayUseCollector(CollectorType type) const {
index 1a3c6f5..2ca4b3f 100644 (file)
@@ -67,7 +67,6 @@ HeapTask* TaskProcessor::GetTask(Thread* self) {
     }
   }
   UNREACHABLE();
-  return nullptr;
 }
 
 void TaskProcessor::UpdateTargetRunTime(Thread* self, HeapTask* task, uint64_t new_target_time) {
index b730670..cfc8549 100644 (file)
@@ -90,6 +90,9 @@ class MANAGED LOCKABLE Object {
   void SetClass(Class* new_klass) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   Object* GetReadBarrierPointer() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+#ifndef USE_BAKER_OR_BROOKS_READ_BARRIER
+  NO_RETURN
+#endif
   void SetReadBarrierPointer(Object* rb_ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
   bool AtomicSetReadBarrierPointer(Object* expected_rb_ptr, Object* rb_ptr)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
index 89779bc..c23f744 100644 (file)
@@ -413,7 +413,6 @@ bool ParsedOptions::Parse(const RuntimeOptions& options, bool ignore_unrecognize
     }
 
     UNREACHABLE();
-    return false;
   }
 
   using M = RuntimeArgumentMap;
index 8cccec8..7ee4118 100644 (file)
@@ -38,7 +38,7 @@ class QuickExceptionHandler {
   QuickExceptionHandler(Thread* self, bool is_deoptimization)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  ~QuickExceptionHandler() {
+  NO_RETURN ~QuickExceptionHandler() {
     LOG(FATAL) << "UNREACHABLE";  // Expected to take long jump.
     UNREACHABLE();
   }
index c6db7e5..d0f8468 100644 (file)
@@ -1075,7 +1075,6 @@ bool MethodVerifier::GetBranchOffset(uint32_t cur_offset, int32_t* pOffset, bool
       break;
     default:
       return false;
-      break;
   }
   return true;
 }