/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_
#define ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_

#include "register_line.h"

#include "base/logging.h"  // For VLOG.
#include "debug_print.h"
#include "method_verifier.h"
#include "reg_type_cache-inl.h"

namespace art {
namespace verifier {

// Should we dump a warning on failures to verify balanced locking? That would be an indication to
// developers that their code will be slow.
static constexpr bool kDumpLockFailures = true;

inline const RegType& RegisterLine::GetRegisterType(MethodVerifier* verifier, uint32_t vsrc) const {
  // The register index was validated during the static pass, so we don't need to check it here.
  DCHECK_LT(vsrc, num_regs_);
  return verifier->GetRegTypeCache()->GetFromId(line_[vsrc]);
}

template <LockOp kLockOp>
inline bool RegisterLine::SetRegisterType(MethodVerifier* verifier, uint32_t vdst,
                                          const RegType& new_type) {
  DCHECK_LT(vdst, num_regs_);
  if (new_type.IsLowHalf() || new_type.IsHighHalf()) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "Expected category1 register type not '"
                                                << new_type << "'";
    return false;
  } else {
    // Note: previously we failed when asked to set a conflict. However, conflicts are OK as long
    // as they are not accessed, and our backends can handle this nowadays.
    line_[vdst] = new_type.GetId();
  }
  switch (kLockOp) {
    case LockOp::kClear:
      // Clear the monitor entry bits for this register.
      ClearAllRegToLockDepths(vdst);
      break;
    case LockOp::kKeep:
      // Should only be doing this with reference types.
      DCHECK(new_type.IsReferenceTypes());
      break;
  }
  return true;
}

inline bool RegisterLine::SetRegisterTypeWide(MethodVerifier* verifier, uint32_t vdst,
                                              const RegType& new_type1,
                                              const RegType& new_type2) {
  DCHECK_LT(vdst + 1, num_regs_);
  if (!new_type1.CheckWidePair(new_type2)) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_SOFT) << "Invalid wide pair '"
                                                << new_type1 << "' '" << new_type2 << "'";
    return false;
  } else {
    line_[vdst] = new_type1.GetId();
    line_[vdst + 1] = new_type2.GetId();
  }
  // Clear the monitor entry bits for this register.
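  // A wide pair holds a long or double, never a reference, so any lock-depth bookkeeping
  // recorded for either half is stale once the pair is overwritten.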
  ClearAllRegToLockDepths(vdst);
  ClearAllRegToLockDepths(vdst + 1);
  return true;
}

inline void RegisterLine::SetResultTypeToUnknown(RegTypeCache* reg_types) {
  result_[0] = reg_types->Undefined().GetId();
  result_[1] = result_[0];
}

inline void RegisterLine::SetResultRegisterType(MethodVerifier* verifier, const RegType& new_type) {
  DCHECK(!new_type.IsLowHalf());
  DCHECK(!new_type.IsHighHalf());
  result_[0] = new_type.GetId();
  result_[1] = verifier->GetRegTypeCache()->Undefined().GetId();
}

inline void RegisterLine::SetResultRegisterTypeWide(const RegType& new_type1,
                                                    const RegType& new_type2) {
  DCHECK(new_type1.CheckWidePair(new_type2));
  result_[0] = new_type1.GetId();
  result_[1] = new_type2.GetId();
}

inline void RegisterLine::CopyRegister1(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc,
                                        TypeCategory cat) {
  DCHECK(cat == kTypeCategory1nr || cat == kTypeCategoryRef);
  const RegType& type = GetRegisterType(verifier, vsrc);
  if (!SetRegisterType<LockOp::kClear>(verifier, vdst, type)) {
    return;
  }
  if (!type.IsConflict() &&  // Allow conflicts to be copied around.
      ((cat == kTypeCategory1nr && !type.IsCategory1Types()) ||
       (cat == kTypeCategoryRef && !type.IsReferenceTypes()))) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy1 v" << vdst << "<-v" << vsrc
                                                << " type=" << type
                                                << " cat=" << static_cast<int>(cat);
  } else if (cat == kTypeCategoryRef) {
    CopyRegToLockDepth(vdst, vsrc);
  }
}

inline void RegisterLine::CopyRegister2(MethodVerifier* verifier, uint32_t vdst, uint32_t vsrc) {
  const RegType& type_l = GetRegisterType(verifier, vsrc);
  const RegType& type_h = GetRegisterType(verifier, vsrc + 1);

  if (!type_l.CheckWidePair(type_h)) {
    verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "copy2 v" << vdst << "<-v" << vsrc
                                                << " type=" << type_l << "/" << type_h;
  } else {
    SetRegisterTypeWide(verifier, vdst, type_l, type_h);
  }
}

inline bool RegisterLine::VerifyRegisterType(MethodVerifier* verifier, uint32_t vsrc,
                                             const RegType& check_type) {
  // Verify the src register type against the check type refining the type of the register
  const RegType& src_type = GetRegisterType(verifier, vsrc);
  if (UNLIKELY(!check_type.IsAssignableFrom(src_type, verifier))) {
    enum VerifyError fail_type;
    if (!check_type.IsNonZeroReferenceTypes() || !src_type.IsNonZeroReferenceTypes()) {
      // Hard fail if one of the types is primitive, since they are concretely known.
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    } else if (check_type.IsUninitializedTypes() || src_type.IsUninitializedTypes()) {
      // Hard fail for uninitialized types, which don't match anything but themselves.
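      // (For example, the result of a new-instance that has not yet been passed to its <init>
      // is only assignable to that exact uninitialized type, so a mismatch here cannot be a
      // resolution issue.)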
      fail_type = VERIFY_ERROR_BAD_CLASS_HARD;
    } else if (check_type.IsUnresolvedTypes() || src_type.IsUnresolvedTypes()) {
      fail_type = VERIFY_ERROR_NO_CLASS;
    } else {
      fail_type = VERIFY_ERROR_BAD_CLASS_SOFT;
    }
    verifier->Fail(fail_type) << "register v" << vsrc << " has type "
                              << src_type << " but expected " << check_type;
    if (check_type.IsNonZeroReferenceTypes() &&
        !check_type.IsUnresolvedTypes() &&
        check_type.HasClass() &&
        src_type.IsNonZeroReferenceTypes() &&
        !src_type.IsUnresolvedTypes() &&
        src_type.HasClass()) {
      DumpB77342775DebugData(check_type.GetClass(), src_type.GetClass());
    }
    return false;
  }
  if (check_type.IsLowHalf()) {
    const RegType& src_type_h = GetRegisterType(verifier, vsrc + 1);
    if (UNLIKELY(!src_type.CheckWidePair(src_type_h))) {
      verifier->Fail(VERIFY_ERROR_BAD_CLASS_HARD) << "wide register v" << vsrc << " has type "
                                                  << src_type << "/" << src_type_h;
      return false;
    }
  }
  // The register at vsrc has a defined type: check_type gives us an upper bound, but it is less
  // precise than the subtype already recorded in vsrc, so leave the register alone for reference
  // types. Primitive types, once defined, are as precise as we can get. Constant types could in
  // principle be refined, but constant propagation has rendered this useless.
  return true;
}

inline void RegisterLine::VerifyMonitorStackEmpty(MethodVerifier* verifier) const {
  if (MonitorStackDepth() != 0) {
    verifier->Fail(VERIFY_ERROR_LOCKING);
    if (kDumpLockFailures) {
      VLOG(verifier) << "expected empty monitor stack in "
                     << verifier->GetMethodReference().PrettyMethod();
    }
  }
}

inline size_t RegisterLine::ComputeSize(size_t num_regs) {
  return OFFSETOF_MEMBER(RegisterLine, line_) + num_regs * sizeof(uint16_t);
}

inline RegisterLine* RegisterLine::Create(size_t num_regs,
                                          ScopedArenaAllocator& allocator,
                                          RegTypeCache* reg_types) {
  void* memory = allocator.Alloc(ComputeSize(num_regs));
  return new (memory) RegisterLine(num_regs, allocator, reg_types);
}

inline RegisterLine::RegisterLine(size_t num_regs,
                                  ScopedArenaAllocator& allocator,
                                  RegTypeCache* reg_types)
    : num_regs_(num_regs),
      monitors_(allocator.Adapter(kArenaAllocVerifier)),
      reg_to_lock_depths_(std::less<uint32_t>(),
                          allocator.Adapter(kArenaAllocVerifier)),
      this_initialized_(false) {
  std::uninitialized_fill_n(line_, num_regs_, 0u);
  SetResultTypeToUnknown(reg_types);
}

inline void RegisterLine::ClearRegToLockDepth(size_t reg, size_t depth) {
  CHECK_LT(depth, 32u);
  DCHECK(IsSetLockDepth(reg, depth));
  auto it = reg_to_lock_depths_.find(reg);
  DCHECK(it != reg_to_lock_depths_.end());
  uint32_t depths = it->second ^ (1 << depth);
  if (depths != 0) {
    it->second = depths;
  } else {
    reg_to_lock_depths_.erase(it);
  }
  // Need to unlock every register at the same lock depth. These are aliased locks.
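  // For example, after "monitor-enter v0; move-object v1, v0" both v0 and v1 map to the same
  // depth bit, so clearing the bit for one register must also clear it for the alias.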
  uint32_t mask = 1 << depth;
  for (auto& pair : reg_to_lock_depths_) {
    if ((pair.second & mask) != 0) {
      VLOG(verifier) << "Also unlocking " << pair.first;
      pair.second ^= mask;
    }
  }
}

inline void RegisterLineArenaDelete::operator()(RegisterLine* ptr) const {
  if (ptr != nullptr) {
    ptr->~RegisterLine();
    ProtectMemory(ptr, RegisterLine::ComputeSize(ptr->NumRegs()));
  }
}

}  // namespace verifier
}  // namespace art

#endif  // ART_RUNTIME_VERIFIER_REGISTER_LINE_INL_H_