/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "linker/arm/relative_patcher_thumb2.h"

#include <sstream>

#include "arch/arm/asm_support_arm.h"
#include "art_method.h"
#include "base/bit_utils.h"
#include "compiled_method.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "linker/linker_patch.h"
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/object.h"
#include "read_barrier.h"
#include "utils/arm/assembler_arm_vixl.h"

namespace art {
namespace linker {

// PC displacement from patch location; Thumb2 PC is always at instruction address + 4.
static constexpr int32_t kPcDisplacement = 4;

// Maximum positive and negative displacement for method call measured from the patch location.
// (Signed 25 bit displacement with the last bit 0 has range [-2^24, 2^24-2] measured from
// the Thumb2 PC pointing right after the BL, i.e. 4 bytes later than the patch location.)
constexpr uint32_t kMaxMethodCallPositiveDisplacement = (1u << 24) - 2 + kPcDisplacement;
constexpr uint32_t kMaxMethodCallNegativeDisplacement = (1u << 24) - kPcDisplacement;
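// In other words, a BL at patch_offset can reach targets in
// [patch_offset - (2^24 - 4), patch_offset + 2^24 + 2].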

// Maximum positive and negative displacement for a conditional branch measured from the patch
// location. (Signed 21 bit displacement with the last bit 0 has range [-2^20, 2^20-2] measured
// from the Thumb2 PC pointing right after the B.cond, i.e. 4 bytes later than the patch location.)
constexpr uint32_t kMaxBcondPositiveDisplacement = (1u << 20) - 2u + kPcDisplacement;
constexpr uint32_t kMaxBcondNegativeDisplacement = (1u << 20) - kPcDisplacement;

Thumb2RelativePatcher::Thumb2RelativePatcher(RelativePatcherTargetProvider* provider)
    : ArmBaseRelativePatcher(provider, InstructionSet::kThumb2) {
}

void Thumb2RelativePatcher::PatchCall(std::vector<uint8_t>* code,
                                      uint32_t literal_offset,
                                      uint32_t patch_offset,
                                      uint32_t target_offset) {
  DCHECK_LE(literal_offset + 4u, code->size());
  DCHECK_EQ(literal_offset & 1u, 0u);
  DCHECK_EQ(patch_offset & 1u, 0u);
  DCHECK_EQ(target_offset & 1u, 1u);  // Thumb2 mode bit.
  uint32_t displacement = CalculateMethodCallDisplacement(patch_offset, target_offset & ~1u);
  displacement -= kPcDisplacement;  // The base PC is at the end of the 4-byte patch.
  DCHECK_EQ(displacement & 1u, 0u);
  DCHECK((displacement >> 24) == 0u || (displacement >> 24) == 255u);  // 25-bit signed.
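  // BL encoding T1: hw1 = 11110:S:imm10, hw2 = 11:J1:1:J2:imm11 (hw1 is kept in the upper
  // 16 bits of `value` here), where J1 = NOT(I1 EOR S), J2 = NOT(I2 EOR S) and the branch
  // displacement is the sign-extended S:I1:I2:imm10:imm11:'0'.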
  uint32_t signbit = (displacement >> 31) & 0x1;
  uint32_t i1 = (displacement >> 23) & 0x1;
  uint32_t i2 = (displacement >> 22) & 0x1;
  uint32_t imm10 = (displacement >> 12) & 0x03ff;
  uint32_t imm11 = (displacement >> 1) & 0x07ff;
  uint32_t j1 = i1 ^ (signbit ^ 1);
  uint32_t j2 = i2 ^ (signbit ^ 1);
  uint32_t value = (signbit << 26) | (j1 << 13) | (j2 << 11) | (imm10 << 16) | imm11;
  value |= 0xf000d000;  // BL

  // Check that we're just overwriting an existing BL.
  DCHECK_EQ(GetInsn32(code, literal_offset) & 0xf800d000, 0xf000d000);
  // Write the new BL.
  SetInsn32(code, literal_offset, value);
}

void Thumb2RelativePatcher::PatchPcRelativeReference(std::vector<uint8_t>* code,
                                                     const LinkerPatch& patch,
                                                     uint32_t patch_offset,
                                                     uint32_t target_offset) {
  uint32_t literal_offset = patch.LiteralOffset();
  uint32_t pc_literal_offset = patch.PcInsnOffset();
  uint32_t pc_base = patch_offset + (pc_literal_offset - literal_offset) + 4u /* PC adjustment */;
  uint32_t diff = target_offset - pc_base;

  uint32_t insn = GetInsn32(code, literal_offset);
  DCHECK_EQ(insn & 0xff7ff0ffu, 0xf2400000u);  // MOVW/MOVT, unpatched (imm16 == 0).
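  // MOVW/MOVT encoding T3 splits the 16-bit immediate into imm4:i:imm3:imm8; with the first
  // halfword in the upper 16 bits of `insn`, these fields sit at bits 16-19, 26, 12-14 and
  // 0-7, respectively.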
  uint32_t diff16 = ((insn & 0x00800000u) != 0u) ? (diff >> 16) : (diff & 0xffffu);
  uint32_t imm4 = (diff16 >> 12) & 0xfu;
  uint32_t imm = (diff16 >> 11) & 0x1u;
  uint32_t imm3 = (diff16 >> 8) & 0x7u;
  uint32_t imm8 = diff16 & 0xffu;
  insn = (insn & 0xfbf08f00u) | (imm << 26) | (imm4 << 16) | (imm3 << 12) | imm8;
  SetInsn32(code, literal_offset, insn);
}

void Thumb2RelativePatcher::PatchBakerReadBarrierBranch(std::vector<uint8_t>* code,
                                                        const LinkerPatch& patch,
                                                        uint32_t patch_offset) {
  DCHECK_ALIGNED(patch_offset, 2u);
  uint32_t literal_offset = patch.LiteralOffset();
  DCHECK_ALIGNED(literal_offset, 2u);
  DCHECK_LT(literal_offset, code->size());
  uint32_t insn = GetInsn32(code, literal_offset);
  DCHECK_EQ(insn, 0xf0408000);  // BNE +0 (unpatched)
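  // 0xf0408000 is hw1 = 0xf040 (11110:S=0:cond=0001(NE):imm6=0) and hw2 = 0x8000
  // (10:J1=0:0:J2=0:imm11=0), i.e. BNE with a zero displacement.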
  ThunkKey key = GetBakerThunkKey(patch);
  if (kIsDebugBuild) {
    const uint32_t encoded_data = key.GetCustomValue1();
    BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
    // Check that the next instruction matches the expected LDR.
    switch (kind) {
      case BakerReadBarrierKind::kField: {
        BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
        if (width == BakerReadBarrierWidth::kWide) {
          DCHECK_GE(code->size() - literal_offset, 8u);
          uint32_t next_insn = GetInsn32(code, literal_offset + 4u);
          // LDR (immediate), encoding T3, with correct base_reg.
          CheckValidReg((next_insn >> 12) & 0xfu);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(next_insn & 0xffff0000u, 0xf8d00000u | (base_reg << 16));
        } else {
          DCHECK_GE(code->size() - literal_offset, 6u);
          uint32_t next_insn = GetInsn16(code, literal_offset + 4u);
          // LDR (immediate), encoding T1, with correct base_reg.
          CheckValidReg(next_insn & 0x7u);  // Check destination register.
          const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(next_insn & 0xf838u, 0x6800u | (base_reg << 3));
        }
        break;
      }
      case BakerReadBarrierKind::kArray: {
        DCHECK_GE(code->size() - literal_offset, 8u);
        uint32_t next_insn = GetInsn32(code, literal_offset + 4u);
        // LDR (register), encoding T2, with correct base_reg and imm2 == 2 (LSL #2).
        CheckValidReg((next_insn >> 12) & 0xfu);  // Check destination register.
        const uint32_t base_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
        CHECK_EQ(next_insn & 0xffff0ff0u, 0xf8500020u | (base_reg << 16));
        CheckValidReg(next_insn & 0xf);  // Check index register
        break;
      }
      case BakerReadBarrierKind::kGcRoot: {
        BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
        if (width == BakerReadBarrierWidth::kWide) {
          DCHECK_GE(literal_offset, 4u);
          uint32_t prev_insn = GetInsn32(code, literal_offset - 4u);
          // LDR (immediate), encoding T3, with correct root_reg.
          const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(prev_insn & 0xfff0f000u, 0xf8d00000u | (root_reg << 12));
        } else {
          DCHECK_GE(literal_offset, 2u);
          uint32_t prev_insn = GetInsn16(code, literal_offset - 2u);
          // LDR (immediate), encoding T1, with correct root_reg.
          const uint32_t root_reg = BakerReadBarrierFirstRegField::Decode(encoded_data);
          CHECK_EQ(prev_insn & 0xf807u, 0x6800u | root_reg);
        }
        break;
      }
      default:
        LOG(FATAL) << "Unexpected type: " << static_cast<uint32_t>(key.GetType());
        UNREACHABLE();
    }
  }
  uint32_t target_offset = GetThunkTargetOffset(key, patch_offset);
  DCHECK_ALIGNED(target_offset, 4u);
  uint32_t disp = target_offset - (patch_offset + kPcDisplacement);
  DCHECK((disp >> 20) == 0u || (disp >> 20) == 0xfffu);   // 21-bit signed.
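  // B<c>.W encoding T3: hw1 = 11110:S:cond:imm6, hw2 = 10:J1:0:J2:imm11 (hw1 in the upper
  // 16 bits of `insn`); place the displacement bits into the S, J1, J2, imm6 and imm11 fields.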
  insn |= ((disp << (26 - 20)) & 0x04000000u) |           // Shift bit 20 to 26, "S".
          ((disp >> (19 - 11)) & 0x00000800u) |           // Shift bit 19 to 11, "J2".
          ((disp >> (18 - 13)) & 0x00002000u) |           // Shift bit 18 to 13, "J1".
          ((disp << (16 - 12)) & 0x003f0000u) |           // Shift bits 12-17 to 16-21, "imm6".
          ((disp >> (1 - 0)) & 0x000007ffu);              // Shift bits 1-11 to 0-10, "imm11".
  SetInsn32(code, literal_offset, insn);
}

#define __ assembler.GetVIXLAssembler()->

static void EmitGrayCheckAndFastPath(arm::ArmVIXLAssembler& assembler,
                                     vixl::aarch32::Register base_reg,
                                     vixl::aarch32::MemOperand& lock_word,
                                     vixl::aarch32::Label* slow_path,
                                     int32_t raw_ldr_offset) {
  using namespace vixl::aarch32;  // NOLINT(build/namespaces)
  // Load the lock word containing the rb_state.
  __ Ldr(ip, lock_word);
  // Given the numeric representation, it's enough to check the low bit of the rb_state.
  static_assert(ReadBarrier::WhiteState() == 0, "Expecting white to have value 0");
  static_assert(ReadBarrier::GrayState() == 1, "Expecting gray to have value 1");
  __ Tst(ip, Operand(LockWord::kReadBarrierStateMaskShifted));
  __ B(ne, slow_path, /* is_far_target */ false);
  __ Add(lr, lr, raw_ldr_offset);
  // Introduce a dependency on the lock_word including rb_state,
  // to prevent load-load reordering, and without using
  // a memory barrier (which would be more expensive).
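  // (LSR #32 shifts out all bits, so the ADD contributes zero and leaves base_reg unchanged
  // while still creating the data dependency on the loaded lock word.)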
  __ Add(base_reg, base_reg, Operand(ip, LSR, 32));
  __ Bx(lr);          // And return back to the function.
  // Note: The fake dependency is unnecessary for the slow path.
}

// Load the read barrier introspection entrypoint in register `entrypoint`
static void LoadReadBarrierMarkIntrospectionEntrypoint(arm::ArmVIXLAssembler& assembler,
                                                       vixl::aarch32::Register entrypoint) {
  using vixl::aarch32::MemOperand;
  using vixl::aarch32::ip;
  // Thread Register.
  const vixl::aarch32::Register tr = vixl::aarch32::r9;

  // The register where the read barrier introspection entrypoint is loaded
  // is fixed: `Thumb2RelativePatcher::kBakerCcEntrypointRegister` (R4).
  DCHECK_EQ(entrypoint.GetCode(), Thumb2RelativePatcher::kBakerCcEntrypointRegister);
  // entrypoint = Thread::Current()->pReadBarrierMarkReg12, i.e. pReadBarrierMarkIntrospection.
  DCHECK_EQ(ip.GetCode(), 12u);
  const int32_t entry_point_offset =
      Thread::ReadBarrierMarkEntryPointsOffset<kArmPointerSize>(ip.GetCode());
  __ Ldr(entrypoint, MemOperand(tr, entry_point_offset));
}

void Thumb2RelativePatcher::CompileBakerReadBarrierThunk(arm::ArmVIXLAssembler& assembler,
                                                         uint32_t encoded_data) {
  using namespace vixl::aarch32;  // NOLINT(build/namespaces)
  BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
  switch (kind) {
    case BakerReadBarrierKind::kField: {
      // Check if the holder is gray and, if not, add fake dependency to the base register
      // and return to the LDR instruction to load the reference. Otherwise, use introspection
      // to load the reference and call the entrypoint (in kBakerCcEntrypointRegister)
      // that performs further checks on the reference and marks it if needed.
      Register base_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      Register holder_reg(BakerReadBarrierSecondRegField::Decode(encoded_data));
      CheckValidReg(holder_reg.GetCode());
      BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      // If base_reg differs from holder_reg, the offset was too large and we must have
      // emitted an explicit null check before the load. Otherwise, we need to null-check
      // the holder as we do not necessarily do that check before going to the thunk.
      vixl::aarch32::Label throw_npe;
      if (holder_reg.Is(base_reg)) {
        __ CompareAndBranchIfZero(holder_reg, &throw_npe, /* is_far_target */ false);
      }
      vixl::aarch32::Label slow_path;
      MemOperand lock_word(holder_reg, mirror::Object::MonitorOffset().Int32Value());
      const int32_t raw_ldr_offset = (width == BakerReadBarrierWidth::kWide)
          ? BAKER_MARK_INTROSPECTION_FIELD_LDR_WIDE_OFFSET
          : BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_OFFSET;
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, raw_ldr_offset);
      __ Bind(&slow_path);
      const int32_t ldr_offset = /* Thumb state adjustment (LR contains Thumb state). */ -1 +
                                 raw_ldr_offset;
      Register ep_reg(kBakerCcEntrypointRegister);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ep_reg);
      if (width == BakerReadBarrierWidth::kWide) {
        MemOperand ldr_half_address(lr, ldr_offset + 2);
        __ Ldrh(ip, ldr_half_address);        // Load the LDR immediate half-word with "Rt | imm12".
        __ Ubfx(ip, ip, 0, 12);               // Extract the offset imm12.
        __ Ldr(ip, MemOperand(base_reg, ip));   // Load the reference.
      } else {
        MemOperand ldr_address(lr, ldr_offset);
        __ Ldrh(ip, ldr_address);             // Load the LDR immediate, encoding T1.
        __ Add(ep_reg,                        // Adjust the entrypoint address to the entrypoint
               ep_reg,                        // for narrow LDR.
               Operand(BAKER_MARK_INTROSPECTION_FIELD_LDR_NARROW_ENTRYPOINT_OFFSET));
        __ Ubfx(ip, ip, 6, 5);                // Extract the imm5, i.e. offset / 4.
        __ Ldr(ip, MemOperand(base_reg, ip, LSL, 2));   // Load the reference.
      }
      // Do not unpoison. With heap poisoning enabled, the entrypoint expects a poisoned reference.
      __ Bx(ep_reg);                          // Jump to the entrypoint.
      if (holder_reg.Is(base_reg)) {
        // Add null check slow path. The stack map is at the address pointed to by LR.
        __ Bind(&throw_npe);
        int32_t offset = GetThreadOffset<kArmPointerSize>(kQuickThrowNullPointer).Int32Value();
        __ Ldr(ip, MemOperand(/* Thread* */ vixl::aarch32::r9, offset));
        __ Bx(ip);
      }
      break;
    }
    case BakerReadBarrierKind::kArray: {
      Register base_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(base_reg.GetCode());
      DCHECK_EQ(kInvalidEncodedReg, BakerReadBarrierSecondRegField::Decode(encoded_data));
      DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      vixl::aarch32::Label slow_path;
      int32_t data_offset =
          mirror::Array::DataOffset(Primitive::ComponentSize(Primitive::kPrimNot)).Int32Value();
      MemOperand lock_word(base_reg, mirror::Object::MonitorOffset().Int32Value() - data_offset);
      DCHECK_LT(lock_word.GetOffsetImmediate(), 0);
      const int32_t raw_ldr_offset = BAKER_MARK_INTROSPECTION_ARRAY_LDR_OFFSET;
      EmitGrayCheckAndFastPath(assembler, base_reg, lock_word, &slow_path, raw_ldr_offset);
      __ Bind(&slow_path);
      const int32_t ldr_offset = /* Thumb state adjustment (LR contains Thumb state). */ -1 +
                                 raw_ldr_offset;
      MemOperand ldr_address(lr, ldr_offset + 2);
      __ Ldrb(ip, ldr_address);               // Load the LDR (register) byte with "00 | imm2 | Rm",
                                              // i.e. Rm+32 because the scale in imm2 is 2.
      Register ep_reg(kBakerCcEntrypointRegister);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ep_reg);
      __ Bfi(ep_reg, ip, 3, 6);               // Insert ip to the entrypoint address to create
                                              // a switch case target based on the index register.
      __ Mov(ip, base_reg);                   // Move the base register to ip.
      __ Bx(ep_reg);                          // Jump to the entrypoint's array switch case.
      break;
    }
    case BakerReadBarrierKind::kGcRoot: {
      // Check if the reference needs to be marked and if so (i.e. not null, not marked yet
      // and it does not have a forwarding address), call the correct introspection entrypoint;
      // otherwise return the reference (or the extracted forwarding address).
      // There is no gray bit check for GC roots.
      Register root_reg(BakerReadBarrierFirstRegField::Decode(encoded_data));
      CheckValidReg(root_reg.GetCode());
      DCHECK_EQ(kInvalidEncodedReg, BakerReadBarrierSecondRegField::Decode(encoded_data));
      BakerReadBarrierWidth width = BakerReadBarrierWidthField::Decode(encoded_data);
      UseScratchRegisterScope temps(assembler.GetVIXLAssembler());
      temps.Exclude(ip);
      vixl::aarch32::Label return_label, not_marked, forwarding_address;
      __ CompareAndBranchIfZero(root_reg, &return_label, /* is_far_target */ false);
      MemOperand lock_word(root_reg, mirror::Object::MonitorOffset().Int32Value());
      __ Ldr(ip, lock_word);
      __ Tst(ip, LockWord::kMarkBitStateMaskShifted);
      __ B(eq, &not_marked);
      __ Bind(&return_label);
      __ Bx(lr);
      __ Bind(&not_marked);
      static_assert(LockWord::kStateShift == 30 && LockWord::kStateForwardingAddress == 3,
                    "To use 'CMP ip, #modified-immediate; BHS', we need the lock word state in "
                    " the highest bits and the 'forwarding address' state to have all bits set");
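      // 0xc0000000 is kStateForwardingAddress (3) shifted left by kStateShift (30).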
      __ Cmp(ip, Operand(0xc0000000));
      __ B(hs, &forwarding_address);
      Register ep_reg(kBakerCcEntrypointRegister);
      LoadReadBarrierMarkIntrospectionEntrypoint(assembler, ep_reg);
      // Adjust the art_quick_read_barrier_mark_introspection address in kBakerCcEntrypointRegister
      // to art_quick_read_barrier_mark_introspection_gc_roots.
      int32_t entrypoint_offset = (width == BakerReadBarrierWidth::kWide)
          ? BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_WIDE_ENTRYPOINT_OFFSET
          : BAKER_MARK_INTROSPECTION_GC_ROOT_LDR_NARROW_ENTRYPOINT_OFFSET;
      __ Add(ep_reg, ep_reg, Operand(entrypoint_offset));
      __ Mov(ip, root_reg);
      __ Bx(ep_reg);
      __ Bind(&forwarding_address);
      __ Lsl(root_reg, ip, LockWord::kForwardingAddressShift);
      __ Bx(lr);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected kind: " << static_cast<uint32_t>(kind);
      UNREACHABLE();
  }
}

std::vector<uint8_t> Thumb2RelativePatcher::CompileThunk(const ThunkKey& key) {
  ArenaPool pool;
  ArenaAllocator allocator(&pool);
  arm::ArmVIXLAssembler assembler(&allocator);

  switch (key.GetType()) {
    case ThunkType::kMethodCall:
      // The thunk just uses the entry point in the ArtMethod. This works even for calls
      // to the generic JNI and interpreter trampolines.
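      // On entry, r0 holds the target ArtMethod* (ART's managed calling convention), so this
      // emits LDR pc, [r0, #<entry point offset>].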
      assembler.LoadFromOffset(
          arm::kLoadWord,
          vixl::aarch32::pc,
          vixl::aarch32::r0,
          ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArmPointerSize).Int32Value());
      __ Bkpt(0);
      break;
    case ThunkType::kBakerReadBarrier:
      CompileBakerReadBarrierThunk(assembler, key.GetCustomValue1());
      break;
  }

  assembler.FinalizeCode();
  std::vector<uint8_t> thunk_code(assembler.CodeSize());
  MemoryRegion code(thunk_code.data(), thunk_code.size());
  assembler.FinalizeInstructions(code);
  return thunk_code;
}

std::string Thumb2RelativePatcher::GetThunkDebugName(const ThunkKey& key) {
  switch (key.GetType()) {
    case ThunkType::kMethodCall:
      return "MethodCallThunk";

    case ThunkType::kBakerReadBarrier: {
      uint32_t encoded_data = key.GetCustomValue1();
      BakerReadBarrierKind kind = BakerReadBarrierKindField::Decode(encoded_data);
      std::ostringstream oss;
      oss << "BakerReadBarrierThunk";
      switch (kind) {
        case BakerReadBarrierKind::kField:
          oss << "Field";
          if (BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide) {
            oss << "Wide";
          }
          oss << "_r" << BakerReadBarrierFirstRegField::Decode(encoded_data)
              << "_r" << BakerReadBarrierSecondRegField::Decode(encoded_data);
          break;
        case BakerReadBarrierKind::kArray:
          oss << "Array_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
          DCHECK_EQ(kInvalidEncodedReg, BakerReadBarrierSecondRegField::Decode(encoded_data));
          DCHECK(BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide);
          break;
        case BakerReadBarrierKind::kGcRoot:
          oss << "GcRoot";
          if (BakerReadBarrierWidthField::Decode(encoded_data) == BakerReadBarrierWidth::kWide) {
            oss << "Wide";
          }
          oss << "_r" << BakerReadBarrierFirstRegField::Decode(encoded_data);
          DCHECK_EQ(kInvalidEncodedReg, BakerReadBarrierSecondRegField::Decode(encoded_data));
          break;
      }
      return oss.str();
    }
  }
}

#undef __

uint32_t Thumb2RelativePatcher::MaxPositiveDisplacement(const ThunkKey& key) {
  switch (key.GetType()) {
    case ThunkType::kMethodCall:
      return kMaxMethodCallPositiveDisplacement;
    case ThunkType::kBakerReadBarrier:
      return kMaxBcondPositiveDisplacement;
  }
}

uint32_t Thumb2RelativePatcher::MaxNegativeDisplacement(const ThunkKey& key) {
  switch (key.GetType()) {
    case ThunkType::kMethodCall:
      return kMaxMethodCallNegativeDisplacement;
    case ThunkType::kBakerReadBarrier:
      return kMaxBcondNegativeDisplacement;
  }
}

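// Thumb2 32-bit instructions are stored as two little-endian 16-bit halfwords. The `value`
// passed to SetInsn32() and the value returned by GetInsn32() keep the first halfword in the
// upper 16 bits, which explains the byte order used below.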
void Thumb2RelativePatcher::SetInsn32(std::vector<uint8_t>* code, uint32_t offset, uint32_t value) {
  DCHECK_LE(offset + 4u, code->size());
  DCHECK_ALIGNED(offset, 2u);
  uint8_t* addr = &(*code)[offset];
  addr[0] = (value >> 16) & 0xff;
  addr[1] = (value >> 24) & 0xff;
  addr[2] = (value >> 0) & 0xff;
  addr[3] = (value >> 8) & 0xff;
}

uint32_t Thumb2RelativePatcher::GetInsn32(ArrayRef<const uint8_t> code, uint32_t offset) {
  DCHECK_LE(offset + 4u, code.size());
  DCHECK_ALIGNED(offset, 2u);
  const uint8_t* addr = &code[offset];
  return
      (static_cast<uint32_t>(addr[0]) << 16) +
      (static_cast<uint32_t>(addr[1]) << 24) +
      (static_cast<uint32_t>(addr[2]) << 0) +
      (static_cast<uint32_t>(addr[3]) << 8);
}

template <typename Vector>
uint32_t Thumb2RelativePatcher::GetInsn32(Vector* code, uint32_t offset) {
  static_assert(std::is_same<typename Vector::value_type, uint8_t>::value, "Invalid value type");
  return GetInsn32(ArrayRef<const uint8_t>(*code), offset);
}

uint32_t Thumb2RelativePatcher::GetInsn16(ArrayRef<const uint8_t> code, uint32_t offset) {
  DCHECK_LE(offset + 2u, code.size());
  DCHECK_ALIGNED(offset, 2u);
  const uint8_t* addr = &code[offset];
  return (static_cast<uint32_t>(addr[0]) << 0) + (static_cast<uint32_t>(addr[1]) << 8);
}

template <typename Vector>
uint32_t Thumb2RelativePatcher::GetInsn16(Vector* code, uint32_t offset) {
  static_assert(std::is_same<typename Vector::value_type, uint8_t>::value, "Invalid value type");
  return GetInsn16(ArrayRef<const uint8_t>(*code), offset);
}

}  // namespace linker
}  // namespace art